diff --git a/ReEDS_Augur/A_prep_data.py b/ReEDS_Augur/A_prep_data.py index 52b1cf1..6bd1213 100644 --- a/ReEDS_Augur/A_prep_data.py +++ b/ReEDS_Augur/A_prep_data.py @@ -328,7 +328,14 @@ def intify(v): gdxreeds['h_szn'].drop('Value', axis=1) .merge(gdxreeds['hours'], on='allh').groupby('allszn').Value.sum()) ## Make sure the number of hours makes sense - assert int(np.around(sznhours.sum(), 0)) % 8760 == 0 + if ( + ((int(np.around(sznhours.sum(), 0)) % 8760) and int(sw.GSw_PRM_CapCredit)) + or ((int(np.around(sznhours.sum(), 0)) % 8766) and not int(sw.GSw_PRM_CapCredit)) + ): + raise ValueError( + f"sznhours.sum() = {sznhours.sum()} but should be divisible by 8760 " + "when using capacity credit and by 8766 when using stress periods" + ) ## [MWh] / [h] = [MW] (average) can_imports_avemw_rszn = ( gdxreeds['can_imports_szn_filt'].pivot(index='allszn',columns='r',values='Value') diff --git a/ReEDS_Augur/G_plots.py b/ReEDS_Augur/G_plots.py index 9ac09d7..ffef316 100644 --- a/ReEDS_Augur/G_plots.py +++ b/ReEDS_Augur/G_plots.py @@ -1,13 +1,17 @@ #%%### Imports -import os, site +import os +import site import pandas as pd import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt +from matplotlib import patheffects as pe from glob import glob +import traceback +import gdxpds +import cmocean pd.options.display.max_rows = 20 pd.options.display.max_columns = 200 -import gdxpds ### Local imports try: import ReEDS_Augur.functions as functions @@ -172,11 +176,14 @@ def get_pras_system(sw, verbose=0): vals = {} for key in keys: vals[key] = list(f[key]) - if verbose: print('{}:\n {}\n'.format(key,','.join(vals[key]))) + if verbose: + print(f"{key}:\n {','.join(vals[key])}\n") for val in vals[key]: pras[key,val] = pd.DataFrame(f[key][val][...]) - if verbose: print('{}/{}: {}'.format(key,val,pras[key,val].shape)) - if verbose: print('\n') + if verbose: + print(f"{key}/{val}: {pras[key,val].shape}") + if verbose: + print('\n') ###### Combine into more easily-usable dataframes dfpras = {} @@ -184,7 +191,7 @@ def get_pras_system(sw, verbose=0): ## our name: [pras key, pras capacity table name] 'storcap': ['storages', 'dischargecapacity'], 'gencap': ['generators', 'capacity'], - 'genstorcap': ['generatorstorages', 'gridinjectioncapacity'], + # 'genstorcap': ['generatorstorages', 'gridinjectioncapacity'], } for key, val in keys.items(): dfpras[key] = pras[val[0], val[1]] @@ -666,8 +673,10 @@ def plot_b1_dispatch_usa(sw, dfs): ) leg.set_title('Generation', prop={'size':'large'}) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename.replace('YEAR',str(y)))) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename.replace('YEAR',str(y)))) + if interactive: + plt.show() plt.close() @@ -691,7 +700,8 @@ def plot_b1c1_profiles_usa(sw, dfs): 'energy': dfs['stor_energy'], }, axis=1).round(1) - if savefig: dfout.to_csv(os.path.join(sw['savepath'],savename)) + if savefig: + dfout.to_csv(os.path.join(sw['savepath'],savename)) def plot_b1_load_duration(sw, dfs): @@ -735,8 +745,10 @@ def plot_b1_load_duration(sw, dfs): columnspacing=0.5, handletextpad=0.5, handlelength=0.75, ) plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -783,8 +795,10 @@ def plot_b1_netload_duration(sw, dfs): columnspacing=0.5, handletextpad=0.5, handlelength=0.75, ) plots.despine(ax) - if savefig: 
plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -806,8 +820,10 @@ def plot_netload_profile(sw, dfs): dfneg, colors=['C0'], f=f, ax=ax, ) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -932,7 +948,7 @@ def plot_dispatch_maxmin_netload_weeks(sw, dfs): ax[row*2+1].set_title( '{} min'.format(region), x=0.01, ha='left', va='top', y=1.0, weight='bold') ## Legend - leg = ax[0].legend( + ax[0].legend( loc='upper left', bbox_to_anchor=(1.02,1), fontsize=11, frameon=False, ncol=1, columnspacing=0.5, handletextpad=0.3, handlelength=0.7, @@ -940,8 +956,10 @@ def plot_dispatch_maxmin_netload_weeks(sw, dfs): ax[0].set_ylabel('Generation [GW]', ha='right', y=1) plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -965,8 +983,10 @@ def plot_b1_storage_energy(sw, dfs): title='Energy level', columnspacing=0.5, handletextpad=0.3, handlelength=0.7, ) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1002,8 +1022,10 @@ def plot_c1_curtailment_timeseries(sw, dfs): fontsize=11, frameon=False, ncol=1, columnspacing=0.5, handletextpad=0.3, handlelength=0.7, ) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1039,8 +1061,10 @@ def plot_c1_load_netload_curtailment_profile(sw, dfs): ) ax.set_ylabel('TW (national total)') plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1090,8 +1114,10 @@ def plot_c1_curtailment_duration(sw, dfs): columnspacing=0.5, handletextpad=0.3, handlelength=0.75, ) plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1121,8 +1147,10 @@ def plot_b1_dropped_load_timeseries(sw, dfs): fontsize=11, frameon=False, ncol=1, columnspacing=0.5, handletextpad=0.3, handlelength=0.7, ) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1163,8 +1191,10 @@ def plot_b1_dropped_load_timeseries_full(sw, dfs): ax[0].set_ylim(0) ax[len(wys)-1].set_ylabel('Dropped load [GW]', y=0, ha='left') plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1188,8 +1218,10 @@ def plot_b1_h2dac_load_timeseries(sw, dfs): dfplot.loc[str(y)].rename('H2/DAC\ndemand').abs().to_frame(), colors=['C9'], dpi=dpi, ) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if 
interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1241,8 +1273,10 @@ def plot_b1_dropped_load_duration(sw, dfs): columnspacing=0.5, handletextpad=0.3, handlelength=0.7, ) plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1311,7 +1345,7 @@ def map_dropped_load(sw, dfs, level='r'): ### Background dfba.plot(ax=ax, facecolor='none', edgecolor='k', lw=0.2) ### Data - dfplot.plot(ax=ax, column='val', cmap=plt.cm.gist_earth_r) + dfplot.plot(ax=ax, column='val', cmap=cmocean.cm.rain) for r, row in dfplot.iterrows(): if row.val > 0: ax.annotate( @@ -1324,8 +1358,10 @@ def map_dropped_load(sw, dfs, level='r'): ax.annotate(r, (row.labelx, row.labely), ha='center', va='bottom', fontsize=6, color='C7') ax.axis('off') - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1339,8 +1375,12 @@ def plot_pras_ICAP(sw, dfs): ### Collect the PRAS system capacities gencap = dfs['pras_system']['gencap'].groupby(axis=1, level=0).sum() storcap = dfs['pras_system']['storcap'].groupby(axis=1, level=0).sum() - genstorcap = dfs['pras_system']['genstorcap'].groupby(axis=1, level=0).sum() - cap = pd.concat([gencap, storcap, genstorcap], axis=1) + # genstorcap = dfs['pras_system']['genstorcap'].groupby(axis=1, level=0).sum() + cap = pd.concat([ + gencap, + storcap, + # genstorcap, + ], axis=1) ## Drop any empties cap = cap.replace(0,np.nan).dropna(axis=1, how='all').fillna(0).astype(int) ## Get the colors @@ -1387,8 +1427,10 @@ def plot_pras_ICAP(sw, dfs): ) leg.set_title(f'ICAP {y}', prop={'size':'large'}) ## Save it - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1405,10 +1447,14 @@ def plot_augur_pras_capacity(sw, dfs): ### Collect the PRAS system capacities gencap = dfs['pras_system']['gencap'] storcap = dfs['pras_system']['storcap'] - genstorcap = dfs['pras_system']['genstorcap'] - load = dfs['pras_system']['load'] / 1e3 + # genstorcap = dfs['pras_system']['genstorcap'] + # load = dfs['pras_system']['load'] / 1e3 cap = {} - cap['pras'] = pd.concat([gencap, storcap, genstorcap], axis=1) / 1e3 + cap['pras'] = pd.concat([ + gencap, + storcap, + # genstorcap, + ], axis=1) / 1e3 ## Drop any empties cap['pras'] = cap['pras'].replace(0,np.nan).dropna(axis=1, how='all').fillna(0) ## Aggregate by type @@ -1451,7 +1497,7 @@ def plot_augur_pras_capacity(sw, dfs): ) alltechs = set() for r in zones: - df = pd.concat({'A':cap['augur'].loc[r], 'P':cap['pras'].loc[r]}, axis=1).T + df = pd.concat({'A':cap['augur'].get(r,pd.Series()), 'P':cap['pras'].get(r,pd.Series())}, axis=1).T order = [c for c in tech_style.index if c in df] missing = [c for c in df if c not in order] if len(missing): @@ -1479,8 +1525,10 @@ def plot_augur_pras_capacity(sw, dfs): ax[-1, 0].set_ylabel('Nameplate capacity [GW]', y=0, ha='left') plots.despine(ax) ## Save it - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1501,9 +1549,13 @@ def 
plot_pras_ICAP_regional(sw, dfs, numdays=5): ### Collect the PRAS system capacities gencap = dfs['pras_system']['gencap'] storcap = dfs['pras_system']['storcap'] - genstorcap = dfs['pras_system']['genstorcap'] + # genstorcap = dfs['pras_system']['genstorcap'] load = dfs['pras_system']['load'] / 1e3 - cap = pd.concat([gencap, storcap, genstorcap], axis=1) / 1e3 + cap = pd.concat([ + gencap, + storcap, + # genstorcap, + ], axis=1) / 1e3 ## Drop any empties cap = cap.replace(0,np.nan).dropna(axis=1, how='all').fillna(0) ## Get the colors @@ -1558,8 +1610,10 @@ def renamer(x): ax[-1, 0].set_ylabel('ICAP [GW]', y=0, ha='left') plots.despine(ax) ## Save it - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1572,11 +1626,16 @@ def plot_pras_unitsize_distribution(sw, dfs): return savename = f"B1-PRAS-unitcap-{sw['t']}.png" import plots + # import reedsplots gencap = dfs['pras_system']['gencap'] storcap = dfs['pras_system']['storcap'] - genstorcap = dfs['pras_system']['genstorcap'] + # genstorcap = dfs['pras_system']['genstorcap'] cap = ( - pd.concat([gencap, storcap, genstorcap], axis=1) + pd.concat([ + gencap, + storcap, + # genstorcap, + ], axis=1) .max().rename('MW').reset_index() ) ## Get the colors @@ -1584,13 +1643,17 @@ def plot_pras_unitsize_distribution(sw, dfs): tech_map.index = tech_map.index.str.lower() tech_map = tech_map.str.lower() tech_style = dfs['tech_style'].copy() + toadd = tech_style.loc[tech_style.index.str.endswith('_mod')] + toadd.index = toadd.index.str.replace('_mod','') + tech_style = pd.concat([tech_style,toadd]) ## Aggregate by type cap.i = cap.i.map( lambda x: x if x.startswith('battery') else (x.strip('_01234567890*').lower())) cap.i = ( cap.i .map(lambda x: tech_map.get(x,x)) - .map(lambda x: ([i for i in tech_map.drop('hyd').index if x.startswith(i)]+[x])[0]) + .str.replace('_upgrade','') + .str.replace('_mod','') ) if 'new_blank_genstor' in cap.i.values: if (cap.loc[cap.i=='new_blank_genstor','MW'] == 0).all(): @@ -1605,21 +1668,26 @@ def plot_pras_unitsize_distribution(sw, dfs): ) order = [i for i in tech_style.index if i in techs] others = [i for i in techs if ((i not in order) and (i not in nondisaggtechs))] - for i in others: - tech_style[i] = 'k' + # for i in others: + # tech_style[i] = 'k' + ylabel = {0: {'scale':1, 'units':'MW'}, 1: {'scale':1e-3, 'units':'GW'}} plt.close() - f,ax = plt.subplots(1,2,figsize=(7,3.75)) + f,ax = plt.subplots(1, 2, figsize=(7,3.75), gridspec_kw={'wspace':0.4}) for i in (order+others)[::-1]: - df = cap.loc[cap.i==i] + df = cap.loc[cap.i==i].copy() col = 1 if i in nondisaggtechs else 0 + df.MW = df.MW * ylabel[col]['scale'] ax[col].plot( - range(len(df)), df.MW.sort_values().values, c=tech_style[i], label=i) + range(len(df)), df.MW.sort_values().values, + c=tech_style.get(i,'k'), label=i) ax[col].annotate( - f' {i}', (len(df), df.MW.max()), fontsize=10, + f' {i}', (len(df), df.MW.max()), + fontsize=10, color=tech_style.get(i,'k'), + path_effects=[pe.withStroke(linewidth=1.5, foreground='w', alpha=0.7)], ) for col in range(2): - ax[col].set_ylabel('Unit size [MW]') + ax[col].set_ylabel(f"Unit size [{ylabel[col]['units']}]") ax[col].set_xlabel('Number of units') ax[col].set_xlim(0) ax[col].set_ylim(0) @@ -1631,8 +1699,10 @@ def plot_pras_unitsize_distribution(sw, dfs): ax[1].set_title('Aggregated techs (PRAS FOR=0)') plots.despine(ax) ## Save it - if savefig: 
plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1658,10 +1728,13 @@ def plot_b1_prices(sw, dfs): '({}) (y = {:.0f}–{:.0f} $/MWh)'.format( sw['t'], dfs['prices'].min().min(), ymax)) - if savefig: plt.savefig(os.path.join( - sw['savepath'],savename.replace('MAX',str(ymax)).replace('YEAR',str(y)) - )) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join( + sw['savepath'], + savename.replace('MAX',str(ymax)).replace('YEAR',str(y)) + )) + if interactive: + plt.show() plt.close() @@ -1693,8 +1766,10 @@ def plot_b1_price_duration(sw, dfs): ax.xaxis.set_minor_locator(mpl.ticker.MultipleLocator(5)) ax.axhline(0,c='0.5',lw=0.5,ls='--') plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1751,8 +1826,10 @@ def plot_b1_co2emissions(sw, dfs): ax.axhline(0,c='k',lw=0.75,ls='-',zorder=5) ax.set_ylabel('CO2 emissions') plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1820,8 +1897,10 @@ def plot_a_meritorder(sw, dfs): ) plots.despine(ax, right=True) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1877,8 +1956,10 @@ def plot_e_cc_mar(sw, dfs): f'{sw.t} {param} [fraction]', weight='bold', fontsize='x-large') plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1905,9 +1986,9 @@ def plot_e_netloadhours_timeseries(sw, dfs): weight='bold',fontsize='large') if row < (len(years) - 1): ax[row].set_xticklabels([]) - h,l = ax[0].get_legend_handles_labels() + handles, labels = ax[0].get_legend_handles_labels() ax[0].legend( - h[::-1], l[::-1], + handles[::-1], labels[::-1], columnspacing=0.5, handletextpad=0.3, handlelength=0.7, loc='upper left', bbox_to_anchor=(1,1), frameon=False, title='ccreg', ) @@ -1915,8 +1996,10 @@ def plot_e_netloadhours_timeseries(sw, dfs): # ax[-1].xaxis.set_major_locator(mpl.dates.MonthLocator()) # ax[-1].xaxis.set_major_formatter(mpl.dates.DateFormatter('%b')) plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1960,17 +2043,19 @@ def plot_e_netloadhours_histogram(sw, dfs): rotation=35, ha='right', rotation_mode='anchor') # ax[2].tick_params(labelrotation=45) ### Formatting - h,l = ax[2].get_legend_handles_labels() + handles, labels = ax[2].get_legend_handles_labels() ax[2].legend( - h[::-1], l[::-1], + handles[::-1], labels[::-1], loc='center left', bbox_to_anchor=(1,0.5), frameon=False, title=sw['capcredit_hierarchy_level'], ncol=1, columnspacing=0.5, handletextpad=0.3, handlelength=0.7, ) ax[0].set_ylabel('Net peak load instances [#]') plots.despine(ax) - if savefig: plt.savefig(os.path.join(sw['savepath'],savename)) - if interactive: plt.show() + if savefig: + 
plt.savefig(os.path.join(sw['savepath'],savename)) + if interactive: + plt.show() plt.close() @@ -1990,137 +2075,137 @@ def main(sw, augur_plots=1): #%% Make the plots try: plot_b1_dispatch_usa(sw, dfs) - except Exception as err: - print('plot_b1_dispatch_usa() failed:', err) + except Exception: + print('plot_b1_dispatch_usa() failed:', traceback.format_exc()) try: plot_b1c1_profiles_usa(sw, dfs) - except Exception as err: - print('plot_b1c1_profiles_usa() failed:', err) + except Exception: + print('plot_b1c1_profiles_usa() failed:', traceback.format_exc()) try: plot_b1_load_duration(sw, dfs) - except Exception as err: - print('plot_b1_load_duration() failed:', err) + except Exception: + print('plot_b1_load_duration() failed:', traceback.format_exc()) try: plot_b1_netload_duration(sw, dfs) - except Exception as err: - print('plot_b1_netload_duration() failed:', err) + except Exception: + print('plot_b1_netload_duration() failed:', traceback.format_exc()) try: plot_dispatch_maxmin_netload_weeks(sw, dfs) - except Exception as err: - print('plot_dispatch_maxmin_netload_weeks() failed:', err) + except Exception: + print('plot_dispatch_maxmin_netload_weeks() failed:', traceback.format_exc()) try: plot_c1_curtailment_duration(sw, dfs) - except Exception as err: - print('plot_c1_curtailment_duration() failed:', err) + except Exception: + print('plot_c1_curtailment_duration() failed:', traceback.format_exc()) try: plot_b1_dropped_load_timeseries(sw, dfs) - except Exception as err: - print('plot_b1_dropped_load_timeseries() failed:', err) + except Exception: + print('plot_b1_dropped_load_timeseries() failed:', traceback.format_exc()) try: plot_b1_dropped_load_timeseries_full(sw, dfs) - except Exception as err: - print('plot_b1_dropped_load_timeseries_full() failed:', err) + except Exception: + print('plot_b1_dropped_load_timeseries_full() failed:', traceback.format_exc()) try: plot_b1_dropped_load_duration(sw, dfs) - except Exception as err: - print('plot_b1_dropped_load_duration() failed:', err) + except Exception: + print('plot_b1_dropped_load_duration() failed:', traceback.format_exc()) try: for level in ['r','transreg']: map_dropped_load(sw, dfs, level=level) - except Exception as err: - print('map_dropped_load() failed:', err) + except Exception: + print('map_dropped_load() failed:', traceback.format_exc()) try: plot_pras_ICAP_regional(sw, dfs) - except Exception as err: - print('plot_pras_ICAP_regional() failed:', err) + except Exception: + print('plot_pras_ICAP_regional() failed:', traceback.format_exc()) try: plot_pras_unitsize_distribution(sw, dfs) - except Exception as err: - print('plot_pras_unitsize_distribution() failed:', err) + except Exception: + print('plot_pras_unitsize_distribution() failed:', traceback.format_exc()) try: plot_b1_price_duration(sw, dfs) - except Exception as err: - print('plot_b1_price_duration() failed:', err) + except Exception: + print('plot_b1_price_duration() failed:', traceback.format_exc()) try: plot_b1_co2emissions(sw, dfs) - except Exception as err: - print('plot_b1_co2emissions() failed:', err) + except Exception: + print('plot_b1_co2emissions() failed:', traceback.format_exc()) try: plot_e_cc_mar(sw, dfs) - except Exception as err: - print('plot_e_cc_mar() failed:', err) + except Exception: + print('plot_e_cc_mar() failed:', traceback.format_exc()) try: plot_e_netloadhours_timeseries(sw, dfs) - except Exception as err: - print('plot_e_netloadhours_timeseries() failed:', err) + except Exception: + print('plot_e_netloadhours_timeseries() failed:', 
traceback.format_exc()) try: plot_e_netloadhours_histogram(sw, dfs) - except Exception as err: - print('plot_e_netloadhours_histogram() failed:', err) + except Exception: + print('plot_e_netloadhours_histogram() failed:', traceback.format_exc()) if int(sw['GSw_H2']) or int(sw['GSw_DAC']): try: plot_b1_h2dac_load_timeseries(sw, dfs) - except Exception as err: - print('plot_b1_h2dac_load_timeseries() failed:', err) + except Exception: + print('plot_b1_h2dac_load_timeseries() failed:', traceback.format_exc()) if augur_plots >= 2: try: plot_augur_pras_capacity(sw, dfs) - except Exception as err: - print('plot_augur_pras_capacity() failed:', err) + except Exception: + print('plot_augur_pras_capacity() failed:', traceback.format_exc()) try: plot_pras_ICAP(sw, dfs) - except Exception as err: - print('plot_pras_ICAP() failed:', err) + except Exception: + print('plot_pras_ICAP() failed:', traceback.format_exc()) try: plot_netload_profile(sw, dfs) - except Exception as err: - print('plot_netload_profile() failed:', err) + except Exception: + print('plot_netload_profile() failed:', traceback.format_exc()) try: plot_a_meritorder(sw, dfs) - except Exception as err: - print('plot_a_meritorder() failed:', err) + except Exception: + print('plot_a_meritorder() failed:', traceback.format_exc()) try: plot_b1_storage_energy(sw, dfs) - except Exception as err: - print('plot_b1_storage_energy() failed:', err) + except Exception: + print('plot_b1_storage_energy() failed:', traceback.format_exc()) try: plot_c1_load_netload_curtailment_profile(sw, dfs) - except Exception as err: - print('plot_c1_load_netload_curtailment_profile() failed:', err) + except Exception: + print('plot_c1_load_netload_curtailment_profile() failed:', traceback.format_exc()) try: plot_b1_prices(sw, dfs) - except Exception as err: - print('plot_b1_prices() failed:', err) + except Exception: + print('plot_b1_prices() failed:', traceback.format_exc()) if augur_plots >= 3: try: plot_c1_curtailment_timeseries(sw, dfs) - except Exception as err: - print('plot_c1_curtailment_timeseries() failed:', err) + except Exception: + print('plot_c1_curtailment_timeseries() failed:', traceback.format_exc()) #%%### PROCEDURE @@ -2142,21 +2227,23 @@ def main(sw, augur_plots=1): iteration = args.iteration # #%%### Inputs for debugging - # reeds_path = os.path.expanduser('~/github/ReEDS-2.0/') + # reeds_path = os.path.expanduser('~/github2/ReEDS-2.0/') # casedir = ( - # '/Volumes/ReEDS/FY22-NTP/Candidates/Archive/ReEDSruns/' - # '20230730/v20230730_ntpH0_AC_DemMd_90by2035EP__core') - # casedir = os.path.join(reeds_path,'runs','v20231115_capcreditM0_capcredit_WECC') - # t = 2030 + # '/Volumes/ReEDS/Users/pbrown/ReEDSruns/' + # '20240112_stresspaper/20240313/v20240313_stresspaperE1_SP_DemHi_90by2035__core' + # ) + # t = 2050 # interactive = True # iteration = -1 - # sw['reeds_path'] = reeds_path - # sw['casedir'] = casedir + # augur_plots = 3 #%%### INPUTS ### Switches sw = functions.get_switches(casedir) sw['t'] = t + ## Debugging + # sw['reeds_path'] = reeds_path + # sw['casedir'] = casedir ### Derivative switches sw['_no_osprey'] = not ( @@ -2187,9 +2274,9 @@ def main(sw, augur_plots=1): # tic = datetime.datetime.now() try: main(sw) - except Exception as err: + except Exception as _err: print('G_plots.py failed with the following exception:') - print(err) + print(traceback.format_exc()) # functions.toc(tic=tic, year=t, process='ReEDS_Augur/G_plots.py') ### Remove intermediate csv files to save drive space diff --git a/ReEDS_Augur/functions.py 
b/ReEDS_Augur/functions.py index e337c3c..1f9bc57 100755 --- a/ReEDS_Augur/functions.py +++ b/ReEDS_Augur/functions.py @@ -6,7 +6,9 @@ """ #%%### Imports import numpy as np -import os, sys, logging +import os +import sys +import logging import pandas as pd import datetime import h5py @@ -213,7 +215,7 @@ def toc(tic, year, process, path=''): (now-tic).total_seconds() ) ) - except: + except Exception: print('meta.csv not found or not writeable') pass diff --git a/b_inputs.gms b/b_inputs.gms old mode 100755 new mode 100644 index f69ab94..b22c915 --- a/b_inputs.gms +++ b/b_inputs.gms @@ -478,6 +478,7 @@ alias(st,ast) ; alias(allt,alltt) ; alias(cendiv,cendiv2) ; alias(rscbin,arscbin) ; +alias(nercr,nercrr) ; alias(transgrp,transgrpp) ; parameter yeart(t) "numeric value for year", @@ -702,6 +703,26 @@ $elseif.pshwat %GSw_PSHwatertypes% == 2 $else.pshwat $endif.pshwat +*** Restrict valcap for hybrid storage techs based on Sw_HybridPlant switch +* 0: Ban all storage, including CSP +if(Sw_HybridPlant = 0, + ban(i)$i_subsets(i,'storage_hybrid') = yes ; +) ; +* 1: Allow CSP, ban all other storage +if(Sw_HybridPlant = 1, + ban(i)$[i_subsets(i,'storage_hybrid')$(not sameas(i,'csp_storage'))] = yes ; + ban(i)$i_subsets(i,'csp_storage') = no ; +) ; +* 2: Allow hybrid plants, excluding CSP +if(Sw_HybridPlant = 2, + ban(i)$[i_subsets(i,'storage_hybrid')$(not sameas(i,'csp_storage'))] = no ; + ban(i)$i_subsets(i,'csp_storage') = yes ; +) ; +* 3: Allow CSP and all other hybrid plants (note csp_storage bans are controlled by Sw_CSP) +if(Sw_HybridPlant = 3, + ban(i)$[i_subsets(i,'storage_hybrid')$(not sameas(i,'csp_storage'))] = no ; +) ; + *ban techs in hybrid PV+battery if the switch calls for it if(Sw_PVB=0, ban(i)$i_subsets(i,'pvb') = yes ; @@ -1116,6 +1137,7 @@ rsc_agg(i,ii)$tg_rsc_cspagg(i,ii) = yes ; rsc_agg(i,ii)$tg_rsc_upvagg(i,ii) = yes ; *All PSH types use the same supply curve rsc_agg('pumped-hydro',ii)$psh(ii) = yes ; +rsc_agg(i,ii)$[ban(i) or ban(ii)] = no ; *============================ * -- Demand flexibility setup -- @@ -1188,11 +1210,13 @@ set tmodel(t) "years to include in the model", tfix(t) "years to fix variables over when summing over previous years", tprev(t,tt) "previous modeled tt from year t", countryfeas(country) "countries included in the model" - stfeas(st) "states to include in the model" ; + stfeas(st) "states to include in the model", + tsolved(t) "years that have solved" ; *following parameters get re-defined when the solve years have been declared parameter mindiff(t) "minimum difference between t and all other tt that are in tmodel(t)" ; + tmodel(t) = no ; tfirst(t) = no ; tlast(t) = no ; @@ -1200,6 +1224,7 @@ countryfeas(country) = no ; tfix(t) = no ; stfeas(st) = no ; tprev(t,tt) = no ; +tsolved(t) = no ; *============================== @@ -1354,7 +1379,7 @@ $onlisting ptc_value_scaled(i,v,t)$[i_water_cooling(i)$Sw_WaterMain] = sum{ii$ctt_i_ii(i,ii), ptc_value_scaled(ii,v,t) } ; -parameter firstyear_v(i,v) "flag for first year of a new vintage" ; +parameter firstyear_v(i,v) "flag for first year that a new vintage can be built" ; firstyear_v(i,v) = sum{t$[yeart(t)=smin(tt$ivt(i,v,tt),yeart(tt))],yeart(t) } ; @@ -1985,6 +2010,22 @@ $onlisting * h2_exogenous_demand.csv is in million tonnes so convert to tonnes h2_exogenous_demand(p,t) = 1e6 * h2_exogenous_demand(p,t) ; +scalar h2_demand_start "--year-- first year that h2 demand should be modeled" + h2_gen_firstyear "--year-- first year that h2 generation technologies are available" +; + +* Identify the first year that 
hydrogen generation technologies are allowed +h2_gen_firstyear = smin{i$[h2_ct(i)$(not ban(i))], firstyear(i) } ; + +* Set h2_demand_start to the first year that there is data +* in h2_exogenous_demand +h2_demand_start = smin{t$[sum{p, h2_exogenous_demand(p,t)}], yeart(t) } ; + +* If h2_gen_firstyear is smaller than h2_demand_start, set h2_demand_start +* to be h2_gen_firstyear +h2_demand_start$[h2_gen_firstyear=Sw_UpgradeYear) $(yeart(t)>=firstyear(i)) *if it is a valid ivt combination which is duplicated from upgrade_to $sum{tt$(yeart(tt)<=yeart(t)), ivt(i,newv,tt) } - $(firstyear_v(i,newv)>=Sw_UpgradeYear) + $(yeart(t)>=Sw_UpgradeYear) ] = yes ; *remove any upgrade considerations if before the upgrade year @@ -3173,6 +3227,15 @@ routes_transgroup(transgrp,transgrpp,r,rr)$[ $(not sameas(r,rr)) ] = yes ; +set routes_nercr(nercr,nercrr,r,rr) "collection of routes between nercrs" ; +routes_nercr(nercr,nercrr,r,rr)$[ + sum{(t,trtype), routes(r,rr,trtype,t) } + $r_nercr(r,nercr) + $r_nercr(rr,nercrr) + $(not sameas(nercr,nercrr)) + $(not sameas(r,rr)) +] = yes ; + * --- transmission cost --- @@ -3929,6 +3992,14 @@ cost_vom_pvb_p(i,v,r,t)$pvb(i) = sum{ii$[upv(ii)$rsc_agg(ii,i)], cost_vom(ii,v, parameter cost_vom_pvb_b(i,v,r,t) "--2004$/MWh-- variable OM for the battery portion of hybrid PV+battery " ; cost_vom_pvb_b(i,v,r,t)$pvb(i) = cost_vom("battery_%GSw_pvb_dur%",v,r,t) ; +* Assign hybrid plant to have the same value as UPV +parameter cost_vom_hybrid_plant(i,v,r,t) "--2004$/MWh-- variable OM for the plant portion of hybrid" ; +cost_vom_hybrid_plant(i,v,r,t)$[storage_hybrid(i)$(not csp(i))] = sum{ii$[upv(ii)$rsc_agg(ii,i)], cost_vom(ii,v,r,t) } ; + +* Assign hybrid storage to have the same value as Battery_X +parameter cost_vom_hybrid_storage(i,v,r,t) "--2004$/MWh-- variable OM for the storage portion of hybrid" ; +cost_vom_hybrid_storage(i,v,r,t)$[storage_hybrid(i)$(not csp(i))] = cost_vom("battery_%GSw_pvb_dur%",v,r,t) ; + *upgrade vom costs for initial classes are the vom costs for that tech *plus the delta between upgrade_to and upgrade_from for the initial year cost_vom(i,initv,r,t)$[upgrade(i)$Sw_Upgrades$valcap(i,initv,r,t)] = @@ -4151,11 +4222,6 @@ fuel_price(i,r,t)$[sum{f$fuel2tech(f,i),1}$(not fuel_price(i,r,t))] = fuel_price(i,r,t)$upgrade(i) = sum{ii$upgrade_to(i,ii), fuel_price(ii,r,t) } ; -* fuel price for H2-CT is accounted for as the marginal off h2 demand equations -* and thus can be removed when Sw_H2 = 1 and the year is beyond Sw_H2_Demand_Start -* otherwise, if Sw_H2 = 0 and Sw_H2CT = 1 the model can use as much H2 as needed for H2CTs at this fuel price -fuel_price(i,r,t)$[h2_ct(i)$Sw_H2$(yeart(t)>=Sw_H2_Demand_Start)] = 0 ; - *===================================================== @@ -4432,9 +4498,20 @@ $onlisting / ; parameter prm(r,t) "planning reserve margin by BA" ; - prm(r,t) = sum{nercr$r_nercr(r,nercr), prm_nt(nercr,t) } ; +$onempty +parameter firm_transfer_limit(nercr,allt) "--MW-- limit on interregional firm transfers" +/ +$offlisting +$ondelim +$include inputs_case%ds%firm_transfer_limit.csv +$offdelim +$onlisting +/ ; +$offempty + + * =========================================================================== * Regional and temporal capital cost multipliers * =========================================================================== @@ -5168,8 +5245,8 @@ $onlisting * = [cost(PV) + cost(B) * bcr ] * cap(PV) cost_cap(i,t)$pvb(i) = (cost_cap_pvb_p(i,t) + bcr(i) * cost_cap_pvb_b(i,t)) * sum{pvb_config$pvb_agg(pvb_config,i), pvbcapmult(t,pvb_config) } ; -scalar 
pvb_itc_qual_frac "--fraction-- fraction of energy that must be charge from local PV for hybrid PV+battery" ; -pvb_itc_qual_frac = %GSw_PVB_ITC_Qual_Constraint% ; +scalar pvb_itc_qual_frac "--fraction-- fraction of energy that must be charged from local PV for hybrid PV+battery" ; +pvb_itc_qual_frac = %GSw_PVB_Charge_Constraint% ; * --- CSP with storage --- @@ -5416,7 +5493,7 @@ cost_upgrade(i,v,r,t)$[initv(v)$valcap(i,v,r,t)$sum{ii$upgrade_from(i,ii),cost_u * start with specifying upgrade_derate as zero upgrade_derate(i,v,r,t) = 0 ; -upgrade_derate(i,initv,r,t)$[upgrade(i)$ccs(i)$unitspec_upgrades(i) +upgrade_derate(i,initv,r,t)$[upgrade(i)$ccs(i)$unitspec_upgrades(i)$valcap(i,initv,r,t) $sum{ii$upgrade_from(i,ii),hintage_data(ii,initv,r,t,"wCCS_Retro_HR") }] = * following calculation is from NEMS/EIA - stating the derate is 1 - [the original heat_rate] / [new heat rate] * take the max of it and zero @@ -5425,12 +5502,17 @@ upgrade_derate(i,initv,r,t)$[upgrade(i)$ccs(i)$unitspec_upgrades(i) * set upgrade derate for new plants and existing plants without data * to the average across all values from NETL CCRD: * https://www.osti.gov/servlets/purl/1887588 -upgrade_derate(i,initv,r,t)$[upgrade(i)$ccs(i)$coal(i)$(not upgrade_derate(i,initv,r,t))] = 0.29 ; -upgrade_derate(i,initv,r,t)$[upgrade(i)$ccs(i)$gas(i)$(not upgrade_derate(i,initv,r,t))] = 0.14 ; +upgrade_derate(i,initv,r,t)$[upgrade(i)$ccs(i)$coal(i) + $(not upgrade_derate(i,initv,r,t)) + $valcap(i,initv,r,t)] = 0.29 ; + +upgrade_derate(i,initv,r,t)$[upgrade(i)$ccs(i)$gas(i) + $(not upgrade_derate(i,initv,r,t)) + $valcap(i,initv,r,t)] = 0.14 ; * same assumptions for new plants -upgrade_derate(i,newv,r,t)$[upgrade(i)$ccs(i)$coal(i)] = 0.29 ; -upgrade_derate(i,newv,r,t)$[upgrade(i)$ccs(i)$gas(i)] = 0.14 ; +upgrade_derate(i,newv,r,t)$[upgrade(i)$ccs(i)$coal(i)$valcap(i,newv,r,t)] = 0.29 ; +upgrade_derate(i,newv,r,t)$[upgrade(i)$ccs(i)$gas(i)$valcap(i,newv,r,t)] = 0.14 ; if((not Sw_UpgradeDerate), upgrade_derate(i,v,r,t) = 0 @@ -5952,8 +6034,8 @@ Parameter load_exog_flex(flex_type,r,allh,t) "the amount of exogenous load that is flexibile" load_exog_static(r,allh,t) "the amount of exogenous load that is static" * Demand response - dr_inc(i,r,allh) "--fraction-- average capacity factor for dr reduction in load in timeslice h" - dr_dec(i,r,allh) "--fraction-- average capacity factor for dr increase in load in timeslice h" + dr_increase(i,r,allh) "--fraction-- average capacity factor for dr reduction in load in timeslice h" + dr_decrease(i,r,allh) "--fraction-- average capacity factor for dr increase in load in timeslice h" allowed_shifts(i,allh,allh) "how much load each dr type is allowed to shift into h from hh" * EVMC storage evmc_storage_discharge_frac(i,r,allh,allt) "--fraction-- fraction of adopted EV storage discharge capacity that can be discharged (deferred charging) in each timeslice h" diff --git a/c_supplymodel.gms b/c_supplymodel.gms old mode 100755 new mode 100644 index 8b71b4c..a8dba2b --- a/c_supplymodel.gms +++ b/c_supplymodel.gms @@ -32,22 +32,21 @@ positive variables UPGRADES(i,v,r,t) "--MW-- investments in upgraded capacity from ii to i" UPGRADES_RETIRE(i,v,r,t) "--MW-- upgrades that have been retired - used as a free slack variable in eq_cap_upgrade" -* The units for all of the operatinal variables are average MW or MWh/time-slice hours +* The units for all of the operational variables are average MW or MWh/time-slice hours * generation and storage variables GEN(i,v,r,allh,t) "--MW-- electricity generation 
(post-curtailment) in hour h" - GEN_PVB_P(i,v,r,allh,t) "--MW-- average PV generation from hybrid PV+Battery in hour h" - GEN_PVB_B(i,v,r,allh,t) "--MW-- average Battery generation (discharge) from hybrid PV+Battery in hour h" + GEN_PLANT(i,v,r,allh,t) "--MW-- average plant generation from hybrid generation/storage technologies in hour h" + GEN_STORAGE(i,v,r,allh,t) "--MW-- average generation from hybrid storage technologies in hour h" + STORAGE_IN_PLANT(i,v,r,allh,t) "--MW-- hybrid plant storage charging in hour h that is charging from a coupled technology" + STORAGE_IN_GRID(i,v,r,allh,t) "--MW-- hybrid plant storage charging in hour h that is charging from the grid" AVAIL_SITE(x,allh,t) "--MW-- available generation from all resources at reV site x" CURT(r,allh,t) "--MW-- curtailment from vre generators in hour h" MINGEN(r,allszn,t) "--MW-- minimum generation level in each season" STORAGE_IN(i,v,r,allh,t) "--MW-- storage charging in hour h that is charging from a given source technology; not used for CSP-TES" - STORAGE_IN_PVB_P(i,v,r,allh,t) "--MW-- PV+Battery storage charging in hour h that is charging from a coupled PV technology" - STORAGE_IN_PVB_G(i,v,r,allh,t) "--MW-- PV+Battery storage charging in hour h that is charging from a source on the grid" STORAGE_LEVEL(i,v,r,allh,t) "--MWh per day-- storage level in hour h" DR_SHIFT(i,v,r,allh,allhh,t) "--MWh-- annual demand response load shifted to timeslice h from timeslice hh" DR_SHED(i,v,r,allh,t) "--MWh-- annual demand response load shed from timeslice h" - RAMPUP(i,v,r,allh,allhh,t) "--MW-- upward change in generation from h to hh" - RAMPDOWN(i,v,r,allh,allhh,t) "--MW-- downward change in generation from h to hh" + RAMPUP(i,r,allh,allhh,t) "--MW-- upward change in generation from h to hh" * flexible CCS variables CCSFLEX_POW(i,v,r,allh,t) "--avg MW-- average power consumed for CCS system" @@ -171,7 +170,7 @@ EQUATION eq_mingen_lb(r,allh,allszn,t) "--MW-- lower bound on minimum generation level" eq_mingen_ub(r,allh,allszn,t) "--MW-- upper bound on minimum generation level" eq_minloading(i,v,r,allh,allhh,t) "--MW-- minimum loading across same-season hours" - eq_ramping(i,v,r,allh,allhh,t) "--MW-- definition of RAMPUP and RAMPDOWN" + eq_ramping(i,r,allh,allhh,t) "--MW-- definition of RAMPUP" eq_reserve_margin(r,ccseason,t) "--MW-- planning reserve margin requirement" eq_supply_demand_balance(r,allh,t) "--MW-- supply demand balance" eq_vsc_flow(r,allh,t) "--MW-- DC power flow" @@ -191,7 +190,7 @@ EQUATION eq_state_cap(st,t) "--metric tons CO2-- state-level CO2 cap constraint -- used to represent California cap and trade program" eq_CSAPR_Budget(csapr_group,t) "--MT NOx-- CSAPR trading group emissions cannot exceed the budget cap" eq_CSAPR_Assurance(st,t) "--MT NOx-- CSAPR state emissions cannot exceed the assurance cap" - eq_BatteryMandate(st,t) "--MW-- Battery storage capacity must be greater than indicated level" + eq_BatteryMandate(st,t) "--MW-- battery storage capacity must be greater than indicated level" eq_cdr_cap(t) "--metric tons CO2-- CO2 removal (DAC and BECCS) can only offset emissions from fossil+CCS and methane leakage" * RPS Policy equations @@ -256,6 +255,8 @@ EQUATION eq_CAPTRAN_GRP(transgrp,transgrpp,t) "--MW-- combined flow capacity between transmission groups" eq_transgrp_limit_energy(transgrp,transgrpp,allh,t) "--MW-- limit on combined interface energy flows" eq_transgrp_limit_prm(transgrp,transgrpp,ccseason,t) "--MW-- limit on combined interface PRM flows" + eq_firm_transfer_limit(nercr,allh,t) "--MW-- limit net firm 
capacity imports into NERC regions when using stress periods" + eq_firm_transfer_limit_cc(nercr,ccseason,t) "--MW-- limit net firm capacity imports into NERC regions when using capacity credit" * storage-specific equations eq_storage_capacity(i,v,r,allh,t) "--MW-- Second storage capacity constraint in addition to eq_capacity_limit" @@ -276,10 +277,10 @@ EQUATION eq_dr_max_increase(i,v,r,allh,t) "--MW-- maximum allowed increase of load from demand response in timeslice h" eq_dr_gen(i,v,r,allh,t) "--MW-- link demand response shifting to generation" -* hybrid PV+battery equations - eq_pvb_total_gen(i,v,r,allh,t) "--MW-- generation post curtailment = generation from pv (post curtailment) + generation from battery - charging from PV" - eq_pvb_array_energy_limit(i,v,r,allh,t) "--MW-- PV energy to storage (no curtailment recovery) + PV energy to inverter <= PV resource" - eq_pvb_inverter_limit(i,v,r,allh,t) "--MW-- energy moving through the inverter cannot exceed the inverter capacity" +* hybrid plant equations + eq_plant_total_gen(i,v,r,allh,t) "--MW-- generation post curtailment = generation from pv (post curtailment) + generation from battery - charging from PV" + eq_hybrid_plant_energy_limit(i,v,r,allh,t) "--MW-- PV energy to storage (no curtailment recovery) + PV energy to inverter <= PV resource" + eq_plant_capacity_limit(i,v,r,allh,t) "--MW-- energy moving through the inverter cannot exceed the inverter capacity" eq_pvb_itc_charge_reqt(i,v,r,t) "--MWh-- total energy charged from local PV >= ITC qualification fraction * total energy charged" * Canadian imports balance @@ -333,10 +334,10 @@ eq_loadcon(r,h,t)$tmodel(t).. *[plus] load shifted from other timeslices + sum{flex_type, FLEX(flex_type,r,h,t) }$Sw_EFS_flex -*[plus] Load created by production activities - only tracked during weighted (non-stress) hours +*[plus] Load created by production activities - only tracked during representative hours * [tonne/hour] / [tonne/MWh] = [MW] + sum{(p,i,v)$[consume(i)$valcap(i,v,r,t)$i_p(i,p)$(not sameas(i,"dac_gas"))], - PRODUCE(p,i,v,r,h,t) / prod_conversion_rate(i,v,r,t) }$[Sw_Prod$hours(h)] + PRODUCE(p,i,v,r,h,t) / prod_conversion_rate(i,v,r,t) }$[Sw_Prod$h_rep(h)] *[plus] load for compressors associated with hydrogen storage injections or withdrawals * tonnes/hour * MWh/tonnes = MW @@ -564,7 +565,7 @@ eq_cap_init_retmo(i,v,r,t)$[valcap(i,v,r,t)$tmodel(t)$initv(v)$(not upgrade(i)) eq_cap_new_noret(i,v,r,t)$[valcap(i,v,r,t)$tmodel(t)$newv(v)$(not upgrade(i)) $(not retiretech(i,v,r,t))].. - sum{tt$[inv_cond(i,v,r,t,tt)$(tmodel(tt) or tfix(tt))], + sum{tt$[inv_cond(i,v,r,t,tt)$(tmodel(tt) or tfix(tt))$valcap(i,v,r,tt)], degrade(i,tt,t) * (INV(i,v,r,tt) + INV_REFURB(i,v,r,tt)$[refurbtech(i)$Sw_Refurb]) } @@ -591,7 +592,7 @@ eq_cap_new_noret(i,v,r,t)$[valcap(i,v,r,t)$tmodel(t)$newv(v)$(not upgrade(i)) eq_cap_new_retub(i,v,r,t)$[valcap(i,v,r,t)$tmodel(t)$newv(v)$(not upgrade(i)) $retiretech(i,v,r,t)].. - sum{tt$[inv_cond(i,v,r,t,tt)$(tmodel(tt) or tfix(tt))], + sum{tt$[inv_cond(i,v,r,t,tt)$(tmodel(tt) or tfix(tt))$valcap(i,v,r,tt)], degrade(i,tt,t) * (INV(i,v,r,tt) + INV_REFURB(i,v,r,tt)$[refurbtech(i)$Sw_Refurb]) } @@ -673,7 +674,7 @@ eq_cap_upgrade(i,v,r,t)$[valcap(i,v,r,t)$upgrade(i)$Sw_Upgrades$tmodel(t)].. 
* without peristent upgrades, all upgrades correspond to their original bintage sum{(tt)$[(tfix(tt) or tmodel(tt))$(yeart(tt)<=yeart(t))$(yeart(tt)>=Sw_Upgradeyear) - $valcap(i,v,r,tt)], + $valcap(i,v,r,tt)$sum{ii$upgrade_from(i,ii),valcap(ii,v,r,tt)}], UPGRADES(i,v,r,tt) }$[Sw_Upgrades=1] * all previous years upgrades converted to new bintages of the present year @@ -927,7 +928,7 @@ eq_spurclip(x,h,t) * --------------------------------------------------------------------------- * If spur-line sharing is disabled, the capacity of the spur line for site x -* must be >= the capacity of the hybrid reosurces (wind and solar) installed at site x +* must be >= the capacity of the hybrid resources (wind and solar) installed at site x eq_spur_noclip(x,t) $[Sw_SpurScen $(not Sw_SpurShare) @@ -953,14 +954,14 @@ eq_spur_noclip(x,t) *capacity must be greater than supply *dispatchable hydro is accounted for both in this constraint and in eq_dhyd_dispatch -*this constraint does not apply to storage nor hybrid PV+Battery -* limits for storage (including storage of hybrid PV+Battery) are tracked in eq_storage_capacity -* limits for PV of Hybrid PV+Battery are tracked in eq_pvb_energy_balance +*this constraint does not apply to storage nor hybrid plant +* limits for storage (including storage of hybrid plants) are tracked in eq_storage_capacity +* limits for plant of Hybrid Plant are tracked in eq_plant_energy_balance * limits for hybrid techs with shared spur lines are treated in eq_capacity_limit_hybrid eq_capacity_limit(i,v,r,h,t) $[tmodel(t)$valgen(i,v,r,t) $(not spur_techs(i)) - $(not storage_standalone(i))$(not pvb(i))$(not nondispatch(i))].. + $(not storage_standalone(i))$(not storage_hybrid(i)$(not csp(i)))$(not nondispatch(i))].. *total amount of dispatchable, non-hydro capacity avail(i,h)$[dispatchtech(i)$(not hydro_d(i))] @@ -1047,21 +1048,21 @@ eq_capacity_limit_nd(i,v,r,h,t)$[tmodel(t)$valgen(i,v,r,t)$nondispatch(i)].. eq_curt_gen_balance(r,h,t)$tmodel(t).. *total potential generation - sum{(i,v)$[valcap(i,v,r,t)$(vre(i) or pvb(i))$(not nondispatch(i))], + sum{(i,v)$[valcap(i,v,r,t)$(vre(i) or storage_hybrid(i)$(not csp(i)))$(not nondispatch(i))], m_cf(i,v,r,h,t) * CAP(i,v,r,t) } *[minus] curtailed generation - - CURT(r,h,t) + - CURT(r,h,t)$Sw_CurtMarket =g= -*must exceed realized generation; exclude hybrid PV+Batttery +*must exceed realized generation; exclude hybrid plants sum{(i,v)$[valgen(i,v,r,t)$vre(i)$(not nondispatch(i))], GEN(i,v,r,h,t) } -*[plus] realized PV generation from hybrid PV+Batttery - + sum{(i,v)$[valgen(i,v,r,t)$pvb(i)$(not nondispatch(i))], GEN_PVB_P(i,v,r,h,t) }$Sw_PVB +*[plus] realized generation from hybrid plant + + sum{(i,v)$[valgen(i,v,r,t)$storage_hybrid(i)$(not csp(i))$(not nondispatch(i))], GEN_PLANT(i,v,r,h,t) }$Sw_HybridPlant -*[plus] sum of operating reserves by type; exclude hybrid PV+Batttery because the PV does not provide reserves +*[plus] sum of operating reserves by type + sum{(ortype,i,v)$[Sw_OpRes$reserve_frac(i,ortype)$opres_h(h)$valgen(i,v,r,t)$vre(i)$(not nondispatch(i))$opres_model(ortype)], OPRES(ortype,i,v,r,h,t) } ; @@ -1112,17 +1113,19 @@ eq_mingen_ub(r,h,szn,t)$[h_szn(h,szn)$(yeart(t)>=mingen_firstyear) *requirement for fleet of a given tech to have a minimum annual capacity factor eq_min_cf(i,r,t)$[minCF(i,t)$tmodel(t)$valgen_irt(i,r,t)$Sw_MinCF].. 
- sum{(v,h)$[valgen(i,v,r,t)], hours(h) * GEN(i,v,r,h,t) } + sum{(v,h)$[valgen(i,v,r,t)$h_rep(h)], hours(h) * GEN(i,v,r,h,t) } =g= - sum{v$valgen(i,v,r,t), CAP(i,v,r,t) } * sum{h, hours(h) } * minCF(i,t) + sum{v$valgen(i,v,r,t), CAP(i,v,r,t) } * sum{h$h_rep(h), hours(h) } * minCF(i,t) ; * --------------------------------------------------------------------------- * Seasonal energy constraint for dispatchable hydropower when all energy must be used within season (no seasonal energy shifting) -eq_dhyd_dispatch(i,v,r,szn,t)$[tmodel(t)$hydro_d(i)$valgen(i,v,r,t)$(within_seas_frac(i,v,r) = 1)].. +eq_dhyd_dispatch(i,v,r,szn,t) + $[tmodel(t)$hydro_d(i)$valgen(i,v,r,t) + $(within_seas_frac(i,v,r) = 1)].. *seasonal hours [times] seasonal capacity factor [times] total hydro capacity [times] seasonal capacity adjustment sum{h$[h_szn(h,szn)], avail(i,h) * hours(h) } @@ -1150,7 +1153,7 @@ eq_dhyd_dispatch(i,v,r,szn,t)$[tmodel(t)$hydro_d(i)$valgen(i,v,r,t)$(within_seas * Annual energy constraint for dispatchable hydropower when seasonal shifting is allowed eq_dhyd_dispatch_ann(i,v,r,t)$[tmodel(t)$hydro_d(i)$valgen(i,v,r,t)$(within_seas_frac(i,v,r) < 1)].. - sum{szn, + sum{szn$szn_rep(szn), * seasonal hours [times] seasonal capacity factor sum{h$[h_szn(h,szn)], avail(i,h) * hours(h) } * [times] total hydro capacity @@ -1161,7 +1164,7 @@ eq_dhyd_dispatch_ann(i,v,r,t)$[tmodel(t)$hydro_d(i)$valgen(i,v,r,t)$(within_seas * m_cf_szn(i,v,r,szn,t) } =g= - sum{szn, + sum{szn$szn_rep(szn), *total seasonal generation plus fraction of energy for regulation sum{h$[h_szn(h,szn)], hours(h) @@ -1178,7 +1181,9 @@ eq_dhyd_dispatch_ann(i,v,r,t)$[tmodel(t)$hydro_d(i)$valgen(i,v,r,t)$(within_seas * --------------------------------------------------------------------------- * Required fraction of energy used within a season for dispatchable hydropower when seasonal shifting is allowed -eq_dhyd_dispatch_szn(i,v,r,szn,t)$[tmodel(t)$hydro_d(i)$valgen(i,v,r,t)$(within_seas_frac(i,v,r) < 1)].. +eq_dhyd_dispatch_szn(i,v,r,szn,t) + $[tmodel(t)$hydro_d(i)$valgen(i,v,r,t)$szn_rep(szn) + $(within_seas_frac(i,v,r) < 1)].. *total seasonal generation plus fraction of energy for regulation sum{h$[h_szn(h,szn)], @@ -1235,13 +1240,14 @@ eq_supply_demand_balance(r,h,t)$tmodel(t).. + (CONVERSION(r,h,"VSC","AC",t) * converter_efficiency_vsc)$[Sw_VSC$val_converter(r,t)] - (CONVERSION(r,h,"AC","VSC",t) / converter_efficiency_vsc)$[Sw_VSC$val_converter(r,t)] -* [minus] storage charging; not Hybrid PV+Battery +* [minus] storage charging; not hybrid+storage - sum{(i,v)$[valcap(i,v,r,t)$(storage_standalone(i) or hyd_add_pump(i))], STORAGE_IN(i,v,r,h,t) } -* [minus] energy into storage for hybrid pv+battery from grid - - sum{(i,v)$[valcap(i,v,r,t)$pvb(i)], STORAGE_IN_PVB_G(i,v,r,h,t) }$Sw_PVB +* [minus] energy into storage for hybrid+storage from grid + - sum{(i,v)$[valcap(i,v,r,t)$storage_hybrid(i)$(not csp(i))], STORAGE_IN_GRID(i,v,r,h,t) }$Sw_HybridPlant * [minus] load shifting from demand response +* NOTE: This is probably messed up for stress periods with nonzero hours(h) - sum{[i,v,hh]$[valgen(i,v,r,t)$dr1(i)$allowed_shifts(i,h,hh)], DR_SHIFT(i,v,r,h,hh,t) / hours(h) / storage_eff(i,t) }$Sw_DR @@ -1299,14 +1305,17 @@ eq_minloading(i,v,r,h,hh,t)$[valgen(i,v,r,t)$minloadfrac(r,i,hh) ; * RAMPUP is used in the calculation of startup/ramping costs -eq_ramping(i,v,r,h,hh,t) - $[Sw_StartCost$tmodel(t)$startcost(i)$numhours_nexth(h,hh)$valgen(i,v,r,t)].. 
- - GEN(i,v,r,hh,t) +* Because RAMPUP has a positive cost, RAMPUP will always either be 0 +* when the RHS is negative, or will be exactly equal to the RHS when +* the RHS is positive. +eq_ramping(i,r,h,hh,t) + $[Sw_StartCost$tmodel(t)$startcost(i)$numhours_nexth(h,hh)$valgen_irt(i,r,t)].. - =e= + RAMPUP(i,r,h,hh,t) - GEN(i,v,r,h,t) + RAMPUP(i,v,r,h,hh,t) - RAMPDOWN(i,v,r,h,hh,t) + =g= + + sum{v$valgen(i,v,r,t), GEN(i,v,r,hh,t) - GEN(i,v,r,h,t) } ; *======================================= @@ -1564,10 +1573,10 @@ eq_reserve_margin(r,ccseason,t)$[tmodel(t)$(yeart(t)>=model_builds_start_yr)$Sw_ * contribution to peak demand based on weighted-average across timeslices in each ccseason * [tonne/hour] / [tonne/MWh] * [hours] / [hours] = [MW] + (sum{(p,i,v,h)$[smr(i)$valcap(i,v,r,t)$frac_h_ccseason_weights(h,ccseason) - $(sameas(p,"H2"))$i_p(i,p)$(not sameas(i,"dac_gas"))$hours(h)], + $(sameas(p,"H2"))$i_p(i,p)$(not sameas(i,"dac_gas"))$h_rep(h)], PRODUCE(p,i,v,r,h,t) / prod_conversion_rate(i,v,r,t) * hours(h) * frac_h_ccseason_weights(h,ccseason) } - / sum{h$frac_h_ccseason_weights(h,ccseason), + / sum{h$[frac_h_ccseason_weights(h,ccseason)$h_rep(h)], hours(h) * frac_h_ccseason_weights(h,ccseason) } )$Sw_Prod @@ -1708,7 +1717,7 @@ eq_transmission_limit(r,rr,h,t,trtype)$[tmodel(t)$routes(r,rr,trtype,t)].. *[plus] operating reserve flows (operating reserves can only be transferred across AC lines) - + sum{ortype$[Sw_OpRes$opres_h(h)$aclike(trtype)$opres_routes(r,rr,t)], + + sum{ortype$[Sw_OpRes$opres_h(h)$aclike(trtype)$opres_routes(r,rr,t)$opres_model(ortype)], OPRES_FLOW(ortype,r,rr,h,t) * opres_mult } ; @@ -1780,6 +1789,54 @@ eq_transgrp_limit_prm(transgrp,transgrpp,ccseason,t) * --------------------------------------------------------------------------- +* NERC regions are only allowed to import firm capacity up to their limit +eq_firm_transfer_limit(nercr,h,t) + $[tmodel(t) + $Sw_PRMTRADE_limit + $(yeart(t)<=Sw_PRMTRADE_limit) + $h_stress(h)].. + + firm_transfer_limit(nercr,t) + + =g= + +* net transmission imports (i.e. minus exports) accounting for losses on imports +* imports [MW] + + sum{(r,rr,trtype,nercrr) + $[routes(rr,r,trtype,t)$routes_prm(rr,r)$routes_nercr(nercrr,nercr,rr,r)], + FLOW(rr,r,h,t,trtype) * (1 - tranloss(rr,r,trtype)) } +* exports [MW] + - sum{(r,rr,trtype,nercrr) + $[routes(r,rr,trtype,t)$routes_prm(r,rr)$routes_nercr(nercr,nercrr,r,rr)], + FLOW(r,rr,h,t,trtype) } +; + +* --------------------------------------------------------------------------- + +* NERC regions are only allowed to import firm capacity up to their limit +eq_firm_transfer_limit_cc(nercr,ccseason,t) + $[tmodel(t) + $Sw_PRMTRADE_limit + $(yeart(t)<=Sw_PRMTRADE_limit) + $Sw_PRM_CapCredit].. + + firm_transfer_limit(nercr,t) + + =g= + +* net transmission imports (i.e. minus exports) accounting for losses on imports +* imports [MW] + + sum{(r,rr,trtype,nercrr) + $[routes(rr,r,trtype,t)$routes_prm(rr,r)$routes_nercr(nercrr,nercr,rr,r)], + PRMTRADE(rr,r,trtype,ccseason,t) * (1 - tranloss(rr,r,trtype)) } +* exports [MW] + - sum{(r,rr,trtype,nercrr) + $[routes(r,rr,trtype,t)$routes_prm(r,rr)$routes_nercr(nercr,nercrr,r,rr)], + PRMTRADE(r,rr,trtype,ccseason,t) } +; + +* --------------------------------------------------------------------------- + * CAP_CONVERTER accumulates INV_CONVERTER from years <= t eq_CAP_CONVERTER(r,t) $[tmodel(t) @@ -1924,7 +1981,7 @@ eq_emit_accounting(e,r,t)$tmodel(t).. 
=e= - sum{(i,v,h)$[valgen(i,v,r,t)], + sum{(i,v,h)$[valgen(i,v,r,t)$h_rep(h)], hours(h) * emit_rate(e,i,v,r,t) * (GEN(i,v,r,h,t) + CCSFLEX_POW(i,v,r,h,t)$[ccsflex(i)$(Sw_CCSFLEX_BYP OR Sw_CCSFLEX_STO OR Sw_CCSFLEX_DAC)]) @@ -1932,7 +1989,7 @@ eq_emit_accounting(e,r,t)$tmodel(t).. * Plus emissions produced via production activities (SMR, SMR-CCS, DAC) * The "production" of negative CO2 emissions via DAC is also included here - + sum{(p,i,v,h)$[valcap(i,v,r,t)$i_p(i,p)], + + sum{(p,i,v,h)$[valcap(i,v,r,t)$i_p(i,p)$h_rep(h)], hours(h) * prod_emit_rate(e,i,t) * PRODUCE(p,i,v,r,h,t) } / emit_scale(e) @@ -1941,10 +1998,12 @@ eq_emit_accounting(e,r,t)$tmodel(t).. *capture = capture per energy used by the ccs system * CCS energy * Flexible CCS - bypass - - (sum{(i,v,h)$[valgen(i,v,r,t)$ccsflex_byp(i)], ccsflex_co2eff(i,t) * hours(h) * CCSFLEX_POW(i,v,r,h,t) } / emit_scale(e)) $[sameas(e,"co2")]$Sw_CCSFLEX_BYP + - (sum{(i,v,h)$[valgen(i,v,r,t)$ccsflex_byp(i)$h_rep(h)], + ccsflex_co2eff(i,t) * hours(h) * CCSFLEX_POW(i,v,r,h,t) } / emit_scale(e)) $[sameas(e,"co2")]$Sw_CCSFLEX_BYP * Flexible CCS - storage - - (sum{(i,v,h)$[valgen(i,v,r,t)$ccsflex_sto(i)], ccsflex_co2eff(i,t) * hours(h) * CCSFLEX_POWREQ(i,v,r,h,t) } / emit_scale(e)) $[sameas(e,"co2")]$Sw_CCSFLEX_STO + - (sum{(i,v,h)$[valgen(i,v,r,t)$ccsflex_sto(i)$h_rep(h)], + ccsflex_co2eff(i,t) * hours(h) * CCSFLEX_POWREQ(i,v,r,h,t) } / emit_scale(e)) $[sameas(e,"co2")]$Sw_CCSFLEX_STO ; * --------------------------------------------------------------------------- @@ -1977,6 +2036,7 @@ eq_state_cap(st,t) * regions (rr) are those that have connection with cap regions. + sum{(h,r,rr,trtype) $[r_st(r,st)$(not r_st(rr,st))$routes(rr,r,trtype,t) + $h_rep(h) * If there is a national zero-carbon cap in the present year, * set emissions intensity of imports to zero. $(not (Sw_AnnualCap and not emit_cap("CO2",t)))], @@ -1998,7 +2058,7 @@ eq_CSAPR_Budget(csapr_group,t)$[Sw_CSAPR$tmodel(t)$(yeart(t)>=csapr_startyr)].. *must exceed the summed-over-state hourly-weighted nox emissions by csapr group sum{st$csapr_group_st(csapr_group,st), - sum{(i,v,h,r)$[r_st(r,st)$valgen(i,v,r,t)], + sum{(i,v,h,r)$[r_st(r,st)$valgen(i,v,r,t)$h_rep(h)], h_weight_csapr(h) * hours(h) * emit_rate("NOX",i,v,r,t) * GEN(i,v,r,h,t) / emit_scale("NOX") } } @@ -2017,7 +2077,7 @@ eq_CSAPR_Assurance(st,t)$[stfeas(st)$(yeart(t)>=csapr_startyr) =g= *must exceed the csapr-hourly-weighted nox emissions by state - sum{(i,v,h,r)$[r_st(r,st)$valgen(i,v,r,t)], + sum{(i,v,h,r)$[r_st(r,st)$valgen(i,v,r,t)$h_rep(h)], h_weight_csapr(h) * hours(h) * emit_rate("NOX",i,v,r,t) * GEN(i,v,r,h,t) / emit_scale("NOX") } ; @@ -2028,7 +2088,7 @@ eq_emit_rate_limit(e,r,t)$[(yeart(t)>=CarbPolicyStartyear)$emit_rate_con(e,r,t) $tmodel(t)].. emit_rate_limit(e,r,t) * ( - sum{(i,v,h)$[valgen(i,v,r,t)], hours(h) * GEN(i,v,r,h,t) } + sum{(i,v,h)$[valgen(i,v,r,t)$h_rep(h)], hours(h) * GEN(i,v,r,h,t) } ) / emit_scale(e) =g= @@ -2077,23 +2137,23 @@ eq_cdr_cap(t) $Sw_NoFossilOffsetCDR].. *** CO2 emissions from fossil CCS... - + sum{(i,v,r,h)$[valgen(i,v,r,t)$ccs(i)$(not beccs(i))], + + sum{(i,v,r,h)$[valgen(i,v,r,t)$ccs(i)$(not beccs(i))$h_rep(h)], hours(h) * emit_rate("CO2",i,v,r,t) * GEN(i,v,r,h,t) / emit_scale("CO2") } *** ...and methane leakage from fossil CCS (if included in national policy)... 
* Methane emissions * global warming potential * [ton CH4] * [ton CO2 / ton CH4] * [emit scale CH4 / cmit scale CO2] - + sum{(i,v,r,h)$[valgen(i,v,r,t)$ccs(i)$(not beccs(i))$Sw_AnnualCapCO2e], + + sum{(i,v,r,h)$[valgen(i,v,r,t)$ccs(i)$(not beccs(i))$h_rep(h)$Sw_AnnualCapCO2e], hours(h) * emit_rate("CH4",i,v,r,t) * GEN(i,v,r,h,t) * Sw_MethaneGWP / emit_scale("CO2") } =g= *** ...must be greater than emissions offset by CDR (negative emissions so negative signs here) ** DAC - - sum{(p,i,v,r,h)$[valcap(i,v,r,t)$i_p(i,p)$dac(i)$sameas(p,"DAC")], + - sum{(p,i,v,r,h)$[valcap(i,v,r,t)$i_p(i,p)$dac(i)$sameas(p,"DAC")$h_rep(h)], hours(h) * prod_emit_rate("CO2",i,t) * PRODUCE(p,i,v,r,h,t) / emit_scale("CO2") } ** BECCS - - sum{(i,v,r,h)$[valgen(i,v,r,t)$beccs(i)], + - sum{(i,v,r,h)$[valgen(i,v,r,t)$beccs(i)$h_rep(h)], hours(h) * emit_rate("CO2",i,v,r,t) * GEN(i,v,r,h,t) / emit_scale("CO2") } ; @@ -2112,9 +2172,9 @@ eq_REC_Generation(RPSCat,i,st,t)$[stfeas(st)$(not tfirst(t))$tmodel(t) *RECS are computed as the total annual generation from a technology *hydro is the only technology adjusted by RPSTechMult *because GEN from pvb(i) includes grid charging, subtract out its grid charging - + sum{(v,r,h)$[valgen(i,v,r,t)$r_st(r,st)], + + sum{(v,r,h)$[valgen(i,v,r,t)$r_st(r,st)$h_rep(h)], RPSTechMult(RPSCat,i,st) * hours(h) - * (GEN(i,v,r,h,t) - (STORAGE_IN_PVB_G(i,v,r,h,t) * storage_eff_pvb_g(i,t))$[pvb(i)$Sw_PVB] ) + * (GEN(i,v,r,h,t) - (STORAGE_IN_GRID(i,v,r,h,t) * storage_eff_pvb_g(i,t))$[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant] ) } =g= @@ -2124,11 +2184,11 @@ eq_REC_Generation(RPSCat,i,st,t)$[stfeas(st)$(not tfirst(t))$tmodel(t) RECS(RPSCat,i,st,ast,t) } * RPS_Bundled RECS and RPS_All RECS can meet the same requirement * therefore lumping them together to avoid double-counting - + sum{ast$[RecMap(i,"RPS_Bundled",st,ast,t)$stfeas(ast)], + + sum{ast$[RecMap(i,"RPS_Bundled",st,ast,t)$stfeas(ast)$(not sameas(st,ast))], RECS("RPS_Bundled",i,st,ast,t) }$[sameas(RPSCat,"RPS_All")] *same logic as bundled RPS RECS is applied to the bundled CES RECS - + sum{ast$[RecMap(i,"CES_Bundled",st,ast,t)$stfeas(ast)], + + sum{ast$[RecMap(i,"CES_Bundled",st,ast,t)$stfeas(ast)$(not sameas(st,ast))], RECS("CES_Bundled",i,st,ast,t) }$[sameas(RPSCat,"CES")] ; @@ -2143,29 +2203,36 @@ eq_REC_Requirement(RPSCat,st,t)$[RecPerc(RPSCat,st,t)$(not tfirst(t)) $(not sameas(RPSCat,"RPS_Bundled")) $(not sameas(RPSCat,"CES_Bundled"))].. -* RECs owned (i.e. imported and generated/used in state) - sum{(i,ast)$[RecMap(i,RPSCat,ast,st,t)$stfeas(ast)], +* RECs owned (i.e. 
imported and generated/used in state minus exports) + + sum{(i,ast)$[RecMap(i,RPSCat,ast,st,t)$stfeas(ast)], RECS(RPSCat,i,ast,st,t) } + - sum{(i,ast)$[RecMap(i,RPSCat,st,ast,t)$stfeas(ast)$(not sameas(st,ast))], + RECS(RPSCat,i,st,ast,t) } -* bundled RECS can also be used to meet the RPS_All requirements +* bundled RECS can also be used to meet the RPS_All requirements (imports minus exports) + sum{(i,ast)$[RecMap(i,"RPS_Bundled",ast,st,t)$stfeas(ast)$(not sameas(ast,st))], RECS("RPS_Bundled",i,ast,st,t) }$[sameas(RPSCat,"RPS_All")] + - sum{(i,ast)$[RecMap(i,"RPS_Bundled",st,ast,t)$stfeas(ast)$(not sameas(st,ast))], + RECS("RPS_Bundled",i,st,ast,t) }$[sameas(RPSCat,"RPS_All")] -* bundled CES credits can also be used to meet the CES requirements +* bundled CES credits can also be used to meet the CES requirements (imports minus exports) + sum{(i,ast)$[RecMap(i,"CES_Bundled",ast,st,t)$stfeas(ast)$(not sameas(ast,st))], RECS("CES_Bundled",i,ast,st,t) }$[sameas(RPSCat,"CES")] + - sum{(i,ast)$[RecMap(i,"CES_Bundled",st,ast,t)$stfeas(ast)$(not sameas(st,ast))], + RECS("CES_Bundled",i,st,ast,t) }$[sameas(RPSCat,"CES")] * ACP credits can also be purchased + ACP_PURCHASES(rpscat,st,t)$(not acp_disallowed(st,RPSCat)) * Exports to Canada are assumed to be clean, and therefore consume CES credits - - sum{(r,h)$[r_st(r,st)], can_exports_h(r,h,t) * hours(h) }$[(Sw_Canada=1)$sameas(RPSCat,"CES")] + - sum{(r,h)$[r_st(r,st)$h_rep(h)], + can_exports_h(r,h,t) * hours(h) }$[(Sw_Canada=1)$sameas(RPSCat,"CES")] =g= * note here we do not pre-define the rec requirement since load_exog(r,h,t) * changes when sent to/from the demand side - RecPerc(RPSCat,st,t) * sum{(r,h)$[r_st_rps(r,st)], hours(h) *( + RecPerc(RPSCat,st,t) * sum{(r,h)$[r_st_rps(r,st)$h_rep(h)], hours(h) * ( * RecStyle(st,RPSCat)=0 means end-use sales. ( (LOAD(r,h,t) - can_exports_h(r,h,t)$[Sw_Canada=1] - sum{v$valgen("distpv",v,r,t), GEN("distpv",v,r,h,t) }) * (1.0 - distloss) @@ -2181,7 +2248,7 @@ eq_REC_Requirement(RPSCat,st,t)$[RecPerc(RPSCat,st,t)$(not tfirst(t)) *subtract out its grid charging (see eq_REC_Generation above). + ( sum{(i,v)$[valgen(i,v,r,t)$(not storage_standalone(i))], GEN(i,v,r,h,t) - (distloss * GEN(i,v,r,h,t))$(distpv(i) or dupv(i)) - - (STORAGE_IN_PVB_G(i,v,r,h,t) * storage_eff_pvb_g(i,t))$[pvb(i)$Sw_PVB] } + - (STORAGE_IN_GRID(i,v,r,h,t) * storage_eff_pvb_g(i,t))$[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant] } - can_exports_h(r,h,t)$[(Sw_Canada=1)$sameas(RPSCat,"CES")] )$(RecStyle(st,RPSCat)=2) )} @@ -2196,7 +2263,7 @@ eq_REC_BundleLimit(RPSCat,st,ast,t)$[stfeas(st)$stfeas(ast)$tmodel(t) $(yeart(t)>=RPS_StartYear)].. *amount of net transmission flows from state st to state ast - sum{(h,r,rr,trtype)$[r_st(r,st)$r_st(rr,ast)$routes(r,rr,trtype,t)], + sum{(h,r,rr,trtype)$[r_st(r,st)$r_st(rr,ast)$routes(r,rr,trtype,t)$h_rep(h)], hours(h) * FLOW(r,rr,h,t,trtype) } @@ -2213,7 +2280,7 @@ eq_REC_unbundledLimit(RPSCat,st,t)$[st_unbundled_limit(RPScat,st)$tmodel(t)$stfe $(sameas(RPSCat,"RPS_All") or sameas(RPSCat,"CES"))].. 
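The reworked requirement above counts credits net of exports; a small sketch of that accounting, with illustrative names only (all quantities in credit-MWh):

    # Sketch of the revised REC balance: held credits net of exports, plus bundled
    # credits (also netted), plus alternative compliance payments, minus the CES
    # adjustment for Canadian exports. Illustrative names and values only.
    def rec_supply(recs_held, recs_exported, bundled_in, bundled_out, acp,
                   ces_canada_exports=0.0):
        return (recs_held - recs_exported + bundled_in - bundled_out + acp
                - ces_canada_exports)

    # 10 TWh held, 2 TWh exported, 1 TWh bundled imports, 0.5 TWh bundled exports,
    # and 0.5 TWh of ACP purchases:
    print(rec_supply(10e6, 2e6, 1e6, 0.5e6, 0.5e6))  # 9000000.0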
*the limit on unbundled RECS times the REC requirement (based on end-use sales) REC_unbundled_limit(RPSCat,st,t) * RecPerc(RPSCat,st,t) * - sum{(r,h)$r_st(r,st), + sum{(r,h)$[r_st(r,st)$h_rep(h)], hours(h) * (LOAD(r,h,t) - can_exports_h(r,h,t)$[Sw_Canada=1] - sum{v$valgen("distpv",v,r,t), GEN("distpv",v,r,h,t) }) * (1.0 - distloss) } @@ -2240,7 +2307,7 @@ eq_REC_ooslim(RPSCat,st,t)$[RecPerc(RPSCat,st,t)$(yeart(t)>=RPS_StartYear) *the fraction of imported recs times the requirement (based on end-use sales) RPS_oosfrac(st) * RecPerc(RPSCat,st,t) * - sum{(r,h)$r_st(r,st), + sum{(r,h)$[r_st(r,st)$h_rep(h)], hours(h) * (LOAD(r,h,t) - can_exports_h(r,h,t)$[Sw_Canada=1] - sum{v$valgen("distpv",v,r,t), GEN("distpv",v,r,h,t) }) * (1.0 - distloss) } @@ -2269,21 +2336,21 @@ eq_REC_launder(RPSCat,st,t)$[RecStates(RPSCat,st,t)$(not tfirst(t))$(yeart(t)>=R $(not sameas(RPSCat,"CES_Bundled"))].. *in-state REC generation - sum{(i,v,r,h)$(valgen(i,v,r,t)$RecTech(RPSCat,i,st,t)$r_st(r,st)), + sum{(i,v,r,h)$[valgen(i,v,r,t)$RecTech(RPSCat,i,st,t)$r_st(r,st)$h_rep(h)], hours(h) * GEN(i,v,r,h,t) } =g= *exported RECS - NB the conditional that st!=ast - sum{(i,ast)$[RecMap(i,RPSCat,ast,st,t)$(stfeas(ast) or sameas(ast,"voluntary"))$(not sameas(st,ast))], + sum{(i,ast)$[RecMap(i,RPSCat,st,ast,t)$(stfeas(ast) or sameas(ast,"voluntary"))$(not sameas(st,ast))], RECS(RPSCat,i,st,ast,t) } - + sum{(i,ast)$[RecMap(i,"RPS_Bundled",ast,st,t)$stfeas(ast)$(not sameas(st,ast))], - RECS("RPS_Bundled",i,ast,st,t) + + sum{(i,ast)$[RecMap(i,"RPS_Bundled",st,ast,t)$stfeas(ast)$(not sameas(st,ast))], + RECS("RPS_Bundled",i,st,ast,t) }$sameas(RPSCat,"RPS_All") - + sum{(i,ast)$[RecMap(i,"CES_Bundled",ast,st,t)$stfeas(ast)$(not sameas(st,ast))], - RECS("CES_Bundled",i,ast,st,t) + + sum{(i,ast)$[RecMap(i,"CES_Bundled",st,ast,t)$stfeas(ast)$(not sameas(st,ast))], + RECS("CES_Bundled",i,st,ast,t) }$sameas(RPSCat,"CES") ; @@ -2324,7 +2391,7 @@ eq_batterymandate(st,t)$[tmodel(t)$batterymandate(st,t)$(yeart(t)>=firstyear_bat eq_national_gen(t)$[tmodel(t)$national_gen_frac(t)$Sw_GenMandate].. *generation from renewables (already post-curtailment) - sum{(i,v,r,h)$[nat_gen_tech_frac(i)$valgen(i,v,r,t)], + sum{(i,v,r,h)$[nat_gen_tech_frac(i)$valgen(i,v,r,t)$h_rep(h)], GEN(i,v,r,h,t) * hours(h) * nat_gen_tech_frac(i) } =g= @@ -2335,16 +2402,16 @@ eq_national_gen(t)$[tmodel(t)$national_gen_frac(t)$Sw_GenMandate].. 
* if Sw_GenMandate = 1, then apply the fraction to the bus bar load ( * load - sum{(r,h), LOAD(r,h,t) * hours(h) } + sum{(r,h)$h_rep(h), LOAD(r,h,t) * hours(h) } * [plus] transmission losses - + sum{(rr,r,h,trtype)$routes(rr,r,trtype,t), (tranloss(rr,r,trtype) * FLOW(rr,r,h,t,trtype) * hours(h)) } + + sum{(rr,r,h,trtype)$[routes(rr,r,trtype,t)$h_rep(h)], (tranloss(rr,r,trtype) * FLOW(rr,r,h,t,trtype) * hours(h)) } * [plus] storage losses - + sum{(i,v,r,h)$[valcap(i,v,r,t)$storage_standalone(i)], STORAGE_IN(i,v,r,h,t) * hours(h) } - - sum{(i,v,r,h)$[valcap(i,v,r,t)$storage_standalone(i)], GEN(i,v,r,h,t) * hours(h) } + + sum{(i,v,r,h)$[valcap(i,v,r,t)$storage_standalone(i)$h_rep(h)], STORAGE_IN(i,v,r,h,t) * hours(h) } + - sum{(i,v,r,h)$[valcap(i,v,r,t)$storage_standalone(i)$h_rep(h)], GEN(i,v,r,h,t) * hours(h) } )$[Sw_GenMandate = 1] * if Sw_GenMandate = 2, then apply the fraction to the end use load - + (sum{(r,h), + + (sum{(r,h)$h_rep(h), hours(h) * ( (LOAD(r,h,t) - can_exports_h(r,h,t)$[Sw_Canada=1]) * (1.0 - distloss) - sum{v$valgen("distpv",v,r,t), GEN("distpv",v,r,h,t) }) })$[Sw_GenMandate = 2] @@ -2360,7 +2427,7 @@ eq_national_gen(t)$[tmodel(t)$national_gen_frac(t)$Sw_GenMandate].. * --------------------------------------------------------------------------- *gas used from each bin is the sum of all gas used -eq_gasused(cendiv,h,t)$[tmodel(t)$((Sw_GasCurve=0) or (Sw_GasCurve=3))$hours(h)].. +eq_gasused(cendiv,h,t)$[tmodel(t)$((Sw_GasCurve=0) or (Sw_GasCurve=3))].. sum{gb,GASUSED(cendiv,gb,h,t) } @@ -2493,7 +2560,7 @@ eq_biousedlimit(bioclass,usda_region,t)$tmodel(t).. *storage use cannot exceed capacity *this constraint does not apply to CSP+TES or hydro pump upgrades -eq_storage_capacity(i,v,r,h,t)$[valgen(i,v,r,t)$(storage_standalone(i) or pvb(i))$tmodel(t)].. +eq_storage_capacity(i,v,r,h,t)$[valgen(i,v,r,t)$(storage_standalone(i) or storage_hybrid(i)$(not csp(i)))$tmodel(t)].. 
* [plus] Capacity of all storage technologies (CAP(i,v,r,t) * bcr(i) * avail(i,h) @@ -2502,19 +2569,19 @@ eq_storage_capacity(i,v,r,h,t)$[valgen(i,v,r,t)$(storage_standalone(i) or pvb(i) =g= -* [plus] Generation from storage, excluding hybrid PV+Battery and adjusting evmc_storage for time-varying discharge (deferral) availability - GEN(i,v,r,h,t)$(not pvb(i)) / (1$(not evmc_storage(i)) + evmc_storage_discharge_frac(i,r,h,t)$evmc_storage(i)) +* [plus] Generation from storage, excluding hybrid+storage and adjusting evmc_storage for time-varying discharge (deferral) availability + GEN(i,v,r,h,t)$(not storage_hybrid(i)$(not csp(i))) / (1$(not evmc_storage(i)) + evmc_storage_discharge_frac(i,r,h,t)$evmc_storage(i)) -* [plus] Generation from battery of hybrid PV+Battery - + GEN_PVB_B(i,v,r,h,t)$[pvb(i)$Sw_PVB] +* [plus] Generation from battery of hybrid+storage + + GEN_STORAGE(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant] * [plus] Storage charging -* not hybrid PV+Battery and adjusting evmc_storage for time-varying charge (add back deferred EV load) availability - + STORAGE_IN(i,v,r,h,t)$[not pvb(i)] / (1$(not evmc_storage(i)) + evmc_storage_charge_frac(i,r,h,t)$evmc_storage(i)) -* hybrid PV+Battery: PV - + STORAGE_IN_PVB_P(i,v,r,h,t)$[pvb(i)$dayhours(h)$Sw_PVB] -* hybrid PV+Battery: Grid - + STORAGE_IN_PVB_G(i,v,r,h,t)$[pvb(i)$Sw_PVB] +* excludes hybrid plant+storage and adjusting evmc_storage for time-varying charge (add back deferred EV load) availability + + STORAGE_IN(i,v,r,h,t)$[not storage_hybrid(i)$(not csp(i))] / (1$(not evmc_storage(i)) + evmc_storage_charge_frac(i,r,h,t)$evmc_storage(i)) +* hybrid+storage plant: plant generation + + STORAGE_IN_PLANT(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$dayhours(h)$Sw_HybridPlant] +* hybrid+storage plant: Grid generation + + STORAGE_IN_GRID(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant] * [plus] Operating reserves + sum{ortype$[Sw_OpRes$opres_model(ortype)$opres_h(h)], @@ -2553,20 +2620,21 @@ eq_storage_level(i,v,r,h,t)$[valgen(i,v,r,t)$storage(i)$(within_seas_frac(i,v,r) sum{szn$h_szn(h,szn), m_cf_szn(i,v,r,szn,t) } )$hyd_add_pump(i) -*[plus] energy into hybrid PV+battery storage -*hybrid pv+battery: PV charging +*[plus] energy into hybrid plant storage +*hybrid+storage plant: plant charging + storage_eff_pvb_p(i,t) * hours_daily(h) - * STORAGE_IN_PVB_P(i,v,r,h,t)$[pvb(i)$dayhours(h)$Sw_PVB] + * STORAGE_IN_PLANT(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$dayhours(h)$Sw_HybridPlant] -*hybrid pv+battery: grid charging - + storage_eff_pvb_g(i,t) * hours_daily(h) * STORAGE_IN_PVB_G(i,v,r,h,t)$[pvb(i)$Sw_PVB] +*hybrid+storage plant: grid charging + + storage_eff_pvb_g(i,t) * hours_daily(h) + * STORAGE_IN_GRID(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant] *[minus] generation from stand-alone storage (discharge) and CSP -*exclude hybrid PV+Battery because GEN refers to output from both the PV and the battery - - hours_daily(h) * GEN(i,v,r,h,t)$[not pvb(i)] +*exclude hybrid+storage plant because GEN refers to output from both the plant and the battery - hours_daily(h) * GEN(i,v,r,h,t)$[not storage_hybrid(i)$(not csp(i))] -*[minus] Generation from Battery (discharge) of hybrid PV+Battery - - hours_daily(h) * GEN_PVB_B(i,v,r,h,t) $[pvb(i)$Sw_PVB] +*[minus] Generation from Battery (discharge) of hybrid+storage plant - hours_daily(h) * GEN_STORAGE(i,v,r,h,t) $[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant]
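As a plausibility check on the renamed storage variables, the capacity constraint above says that everything flowing through the storage in a timeslice must fit within its power rating. A minimal sketch for one timeslice of a hybrid plant (illustrative names and values, not model symbols):

    # Sketch of the storage power-capacity check for one timeslice.
    # cap_mw: total plant capacity; bcr: battery capacity ratio; avail: availability.
    def storage_power_ok(cap_mw, bcr, avail, discharge_mw,
                         charge_plant_mw, charge_grid_mw, opres_mw):
        limit = cap_mw * bcr * avail
        return discharge_mw + charge_plant_mw + charge_grid_mw + opres_mw <= limit

    # A 200 MW hybrid plant with bcr = 0.5 has 100 MW of storage power:
    print(storage_power_ok(200, 0.5, 1.0, 60, 20, 10, 5))  # True (95 <= 100)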
*[minus] losses from reg reserves (only half because only charging half *the time while providing reg reserves) @@ -2583,27 +2651,11 @@ eq_storage_seas(i,v,r,t) $[valgen(i,v,r,t)$storage(i) $(within_seas_frac(i,v,r) < 1)$tmodel(t)].. - sum{h, + sum{h$h_rep(h), *[plus] annual storage charging - storage_eff(i,t) * hours(h) * ( + storage_eff(i,t) * hours(h) *energy into stand-alone storage (not CSP-TES) and hydropower that adds pumping - STORAGE_IN(i,v,r,h,t)$(storage_standalone(i) or hyd_add_pump(i)) - -*** vvv within_seas_frac(i,v,r) is 1 for all techs besides PSH and dispatchable hydro, -*** so these lines are never executed -*energy into storage from CSP field - + (CAP(i,v,r,t) * csp_sm(i) * m_cf(i,v,r,h,t) - )$[CSP_Storage(i)$valcap(i,v,r,t)] - ) - -*[plus] energy into hybrid PV+battery storage -*hybrid pv+battery: PV charging - + storage_eff_pvb_p(i,t) * hours(h) - * STORAGE_IN_PVB_P(i,v,r,h,t)$[pvb(i)$dayhours(h)$Sw_PVB] - -*hybrid pv+battery: grid charging - + storage_eff_pvb_g(i,t) * hours(h) * STORAGE_IN_PVB_G(i,v,r,h,t)$[pvb(i)$Sw_PVB] -*** ^^^ + * STORAGE_IN(i,v,r,h,t)$(storage_standalone(i) or hyd_add_pump(i)) *[plus] annual water inflow energy available for hydropower that adds pumping + (CAP(i,v,r,t) * avail(i,h) * hours(h) * @@ -2613,7 +2665,7 @@ eq_storage_seas(i,v,r,t) =e= *[plus] annual generation - sum{h, hours(h) * GEN(i,v,r,h,t) } + sum{h$h_rep(h), hours(h) * GEN(i,v,r,h,t) } ; * --------------------------------------------------------------------------- @@ -2621,7 +2673,7 @@ eq_storage_seas(i,v,r,t) * Minimum amount of storage input in a season to be used for generation in that season, * when cross-season energy shifting is available eq_storage_seas_szn(i,v,r,szn,t) - $[valgen(i,v,r,t)$storage(i) + $[valgen(i,v,r,t)$storage(i)$szn_rep(szn) $(within_seas_frac(i,v,r) < 1)$tmodel(t)].. *[plus] seasonal generation @@ -2633,24 +2685,10 @@ eq_storage_seas_szn(i,v,r,szn,t) within_seas_frac(i,v,r) * *[plus] seasonal storage charging sum{h$h_szn(h,szn), - storage_eff(i,t) * hours(h) * + storage_eff(i,t) * hours(h) *energy into stand-alone storage (not CSP-TES) and hydropower that adds pumping - ( STORAGE_IN(i,v,r,h,t)$(storage_standalone(i) or hyd_add_pump(i)) + * STORAGE_IN(i,v,r,h,t)$(storage_standalone(i) or hyd_add_pump(i)) -*** vvv within_seas_frac(i,v,r) is 1 for all techs besides PSH and dispatchable hydro, -*** so these lines are never executed -*energy into storage from CSP field - + (CAP(i,v,r,t) * csp_sm(i) * m_cf(i,v,r,h,t) - )$[CSP_Storage(i)$valcap(i,v,r,t)] - ) - -*[plus] energy into hybrid PV+battery storage -*hybrid pv+battery: PV charging - + storage_eff_pvb_p(i,t) * hours(h) - * STORAGE_IN_PVB_P(i,v,r,h,t)$[pvb(i)$dayhours(h)$Sw_PVB] -*hybrid pv+battery: grid charging - + storage_eff_pvb_g(i,t) * hours(h) * STORAGE_IN_PVB_G(i,v,r,h,t)$[pvb(i)$Sw_PVB] -*** ^^^ *[plus] seasonal water inflow energy available for hydropower that adds pumping + (CAP(i,v,r,t) * avail(i,h) * hours(h) @@ -2664,16 +2702,16 @@ eq_storage_seas_szn(i,v,r,szn,t) *there must be sufficient energy in storage to provide operating reserves eq_storage_opres(i,v,r,h,t) $[valgen(i,v,r,t)$tmodel(t)$Sw_OpRes$opres_h(h) - $(storage_standalone(i) or pvb(i) or hyd_add_pump(i))].. + $(storage_standalone(i) or storage_hybrid(i)$(not csp(i)) or hyd_add_pump(i))].. 
*[plus] initial storage level STORAGE_LEVEL(i,v,r,h,t) *[minus] generation that occurs during this timeslice - - hours_daily(h) * GEN(i,v,r,h,t) $[not pvb(i)] + - hours_daily(h) * GEN(i,v,r,h,t) $[not storage_hybrid(i)$(not csp(i))] *[minus] generation that occurs during this timeslice - - hours_daily(h) * GEN_PVB_B(i,v,r,h,t) $[pvb(i)$Sw_PVB] + - hours_daily(h) * GEN_STORAGE(i,v,r,h,t) $[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant] *[minus] losses from reg reserves (only half because only charging half *the time while providing reg reserves) @@ -2759,23 +2797,21 @@ eq_storage_in_minloading(i,v,r,h,hh,t)$[(storage_standalone(i) or hyd_add_pump(i * --------------------------------------------------------------------------- *=============================== -* --- Hybrid PV+Battery --- +* --- Hybrid Plant --- *=============================== * --------------------------------------------------------------------------- -* Generation post curtailment = -* + generation from pv (post curtailment) -* + generation from battery -* - storage charging from PV -eq_pvb_total_gen(i,v,r,h,t)$[pvb(i)$tmodel(t)$valgen(i,v,r,t)$Sw_PVB].. +*Generation post curtailment = +* + generation from hybrid storage plant + generation from storage - storage charging from hybrid storage plant +eq_plant_total_gen(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$tmodel(t)$valgen(i,v,r,t)$Sw_HybridPlant].. - + GEN_PVB_P(i,v,r,h,t) + + GEN_PLANT(i,v,r,h,t) - + GEN_PVB_B(i,v,r,h,t) + + GEN_STORAGE(i,v,r,h,t) -* [minus] charging from PV (1) for curtailment recovery and (2) not for curtailment recovery - - STORAGE_IN_PVB_P(i,v,r,h,t)$dayhours(h) +*[minus] charging from hybrid storage plant - STORAGE_IN_PLANT(i,v,r,h,t)$dayhours(h) =e= @@ -2784,63 +2820,63 @@ eq_pvb_total_gen(i,v,r,h,t)$[pvb(i)$tmodel(t)$valgen(i,v,r,t)$Sw_PVB].. * --------------------------------------------------------------------------- -* Energy to storage from PV (not for curtailment recovery) + PV generation (post curtailment) <= PV resource -* capacity factor is adjusted to include inverter losses, clipping losses, and low voltage losses -eq_pvb_array_energy_limit(i,v,r,h,t)$[pvb(i)$tmodel(t)$valgen(i,v,r,t)$valcap(i,v,r,t)$Sw_PVB].. +*Energy to storage from hybrid storage plant + hybrid storage plant generation <= hybrid storage plant maximum production for a resource +*capacity factor is adjusted to include inverter losses, clipping losses, and low voltage losses +eq_hybrid_plant_energy_limit(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$tmodel(t)$valgen(i,v,r,t)$valcap(i,v,r,t)$Sw_HybridPlant].. -* [plus] PV output +* [plus] plant output m_cf(i,v,r,h,t) * CAP(i,v,r,t) =g= -* [plus] charging from PV (no curtailment recovery) - + STORAGE_IN_PVB_P(i,v,r,h,t)$dayhours(h) +*[plus] charging from hybrid plant + + STORAGE_IN_PLANT(i,v,r,h,t)$dayhours(h) -* [plus] generation from PV (post curtailment) - + GEN_PVB_P(i,v,r,h,t) +*[plus] generation from hybrid plant + + GEN_PLANT(i,v,r,h,t) ; * --------------------------------------------------------------------------- -* Energy moving through the inverter cannot exceed the inverter capacity -eq_pvb_inverter_limit(i,v,r,h,t)$[pvb(i)$tmodel(t)$valgen(i,v,r,t)$valcap(i,v,r,t)$Sw_PVB].. +*Energy moving through the inverter cannot exceed the inverter capacity +eq_plant_capacity_limit(i,v,r,h,t)$[storage_hybrid(i)$(not csp(i))$tmodel(t)$valgen(i,v,r,t)$valcap(i,v,r,t)$Sw_HybridPlant].. 
-* [plus] inverter capacity [AC] = panel capacity [DC] / ILR [DC/AC] +*[plus] inverter capacity [AC] = panel capacity [DC] / ILR [DC/AC] + CAP(i,v,r,t) / ilr(i) =g= -* [plus] Output from PV - + GEN_PVB_P(i,v,r,h,t) +* [plus] Output from plant + + GEN_PLANT(i,v,r,h,t) -* [plus] Output form battery - + GEN_PVB_B(i,v,r,h,t) +* [plus] Output from storage + + GEN_STORAGE(i,v,r,h,t) -* [plus] Battery charging from grid - + STORAGE_IN_PVB_G(i,v,r,h,t) +*[plus] battery charging from grid + + STORAGE_IN_GRID(i,v,r,h,t) -* [plus] Battery operating reserves +*[plus] battery operating reserves + sum{ortype$[Sw_OpRes$opres_h(h)$opres_model(ortype)], OPRES(ortype,i,v,r,h,t) } ; * --------------------------------------------------------------------------- -* total energy charged from local PV >= ITC qualification fraction * total energy charged +*Total energy charged from local PV >= ITC qualification fraction * total energy charged eq_pvb_itc_charge_reqt(i,v,r,t)$[pvb(i)$tmodel(t)$valgen(i,v,r,t)$pvb_itc_qual_frac$Sw_PVB].. -* [plus] Battery charging from PV - + sum{h$dayhours(h), STORAGE_IN_PVB_P(i,v,r,h,t) * hours(h) } +* [plus] battery charging from PV + + sum{h$[dayhours(h)$h_rep(h)], STORAGE_IN_PLANT(i,v,r,h,t) * hours(h) } =g= + pvb_itc_qual_frac * ( -* [plus] Battery charging from PV - + sum{h$dayhours(h), STORAGE_IN_PVB_P(i,v,r,h,t) * hours(h) } +* [plus] battery charging from PV + + sum{h$[dayhours(h)$h_rep(h)], STORAGE_IN_PLANT(i,v,r,h,t) * hours(h) } -* [plus] Battery charging from Grid - + sum{h, STORAGE_IN_PVB_G(i,v,r,h,t) * hours(h) } +* [plus] battery charging from Grid + + sum{h$h_rep(h), STORAGE_IN_GRID(i,v,r,h,t) * hours(h) } ) ; @@ -2853,7 +2889,7 @@ eq_pvb_itc_charge_reqt(i,v,r,t)$[pvb(i)$tmodel(t)$valgen(i,v,r,t)$pvb_itc_qual_f *maximum energy shifted to timeslice h from timeslice hh eq_dr_max_shift(i,v,r,h,hh,t)$[allowed_shifts(i,h,hh)$valgen(i,v,r,t)$valcap(i,v,r,t)$dr1(i)$tmodel(t)$Sw_DR].. - CAP(i,v,r,t) * dr_dec(i,r,hh) * hours(hh) * allowed_shifts(i,h,hh) + CAP(i,v,r,t) * dr_decrease(i,r,hh) * hours(hh) * allowed_shifts(i,h,hh) =g= @@ -2863,7 +2899,7 @@ eq_dr_max_shift(i,v,r,h,hh,t)$[allowed_shifts(i,h,hh)$valgen(i,v,r,t)$valcap(i,v *total allowable load decrease in timeslice h eq_dr_max_decrease(i,v,r,h,t)$[valgen(i,v,r,t)$valcap(i,v,r,t)$dr1(i)$tmodel(t)$Sw_DR].. - CAP(i,v,r,t) * dr_dec(i,r,h) * hours(h) + CAP(i,v,r,t) * dr_decrease(i,r,h) * hours(h) =g= @@ -2876,7 +2912,7 @@ eq_dr_max_decrease(i,v,r,h,t)$[valgen(i,v,r,t)$valcap(i,v,r,t)$dr1(i)$tmodel(t)$ * larger, requiring division here to ensure it doesn't exceed allowed increase eq_dr_max_increase(i,v,r,h,t)$[valgen(i,v,r,t)$valcap(i,v,r,t)$dr1(i)$tmodel(t)$Sw_DR].. - CAP(i,v,r,t) * dr_inc(i,r,h) * hours(h) + CAP(i,v,r,t) * dr_increase(i,r,h) * hours(h) =g= @@ -2901,7 +2937,7 @@ eq_dr_gen(i,v,r,h,t)$[valgen(i,v,r,t)$dr(i)$tmodel(t)$SW_DR].. *total allowable load decrease in timeslice h from shed types DR eq_dr_max_shed(i,v,r,h,t)$[valgen(i,v,r,t)$valcap(i,v,r,t)$dr2(i)$tmodel(t)$Sw_DR].. - CAP(i,v,r,t) * dr_dec(i,r,h) * hours(h) + CAP(i,v,r,t) * dr_decrease(i,r,h) * hours(h) =g= @@ -2914,7 +2950,7 @@ eq_dr_max_shed_hrs(i,v,r,t)$[valgen(i,v,r,t)$valcap(i,v,r,t)$dr2(i)$tmodel(t)$Sw =g= - sum{h$dr_dec(i,r,h), DR_SHED(i,v,r,h,t) / dr_dec(i,r,h) } + sum{h$dr_decrease(i,r,h), DR_SHED(i,v,r,h,t) / dr_decrease(i,r,h) } ; @@ -2942,7 +2978,7 @@ eq_Canadian_Imports(r,szn,t)$[can_imports_szn(r,szn,t)$tmodel(t)$(Sw_Canada=1)].
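The ITC charging requirement retained above reduces to a share test over the representative year: charging from the coupled plant must be at least pvb_itc_qual_frac of all charging. A minimal sketch (illustrative names; the 0.75 threshold is only an example value, not the model's switch setting):

    # Sketch of the ITC charge-qualification test for a hybrid plant.
    # Inputs are hours-weighted annual charging totals; names are illustrative.
    def itc_charge_share_ok(plant_charge_mwh, grid_charge_mwh, qual_frac=0.75):
        return plant_charge_mwh >= qual_frac * (plant_charge_mwh + grid_charge_mwh)

    print(itc_charge_share_ok(80_000, 20_000))  # True: 80% charged from the plant
    print(itc_charge_share_ok(60_000, 40_000))  # False: only 60%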
* --------------------------------------------------------------------------- *water accounting for all valid power plants for generation where usage is both for cooling and/or non-cooling purposes -eq_water_accounting(i,v,w,r,h,t)$[i_water(i)$valgen(i,v,r,t)$tmodel(t)$Sw_WaterMain].. +eq_water_accounting(i,v,w,r,h,t)$[i_water(i)$valgen(i,v,r,t)$h_rep(h)$tmodel(t)$Sw_WaterMain].. WAT(i,v,w,r,h,t) @@ -2964,7 +3000,7 @@ eq_water_capacity_total(i,v,r,t)$[tmodel(t)$valcap(i,v,r,t) *require enough water capacity to allow 100% capacity factor (8760 hour operation) *division by 1E6 to convert gal of water_rate(i,w,r) to Mgal - sum{h, hours(h) + sum{h$h_rep(h), hours(h) * sum{w$i_w(i,w), CAP(i,v,r,t) * water_rate(i,w,r) } * (1 + sum{szn, h_szn(h,szn) * seas_cap_frac_delta(i,v,r,szn,t)}) @@ -3016,7 +3052,7 @@ eq_prod_capacity_limit(i,v,r,h,t) $consume(i) $valcap(i,v,r,t) $Sw_Prod - $hours(h)].. + $h_rep(h)].. * available capacity [times] the conversion rate of tonne / MW CAP(i,v,r,t) * avail(i,h) @@ -3031,10 +3067,10 @@ eq_prod_capacity_limit(i,v,r,h,t) ; * H2 demand balance; national and annual. Active only when Sw_H2=1. -eq_h2_demand(p,t)$[(sameas(p,"H2"))$tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)$(Sw_H2=1)].. +eq_h2_demand(p,t)$[(sameas(p,"H2"))$tmodel(t)$(yeart(t)>=h2_demand_start)$(Sw_H2=1)].. * annual tonnes of production - sum{(i,v,r,h)$[h2(i)$valcap(i,v,r,t)$i_p(i,p)], + sum{(i,v,r,h)$[h2(i)$valcap(i,v,r,t)$i_p(i,p)$h_rep(h)], PRODUCE(p,i,v,r,h,t) * hours(h) } =g= @@ -3044,7 +3080,7 @@ eq_h2_demand(p,t)$[(sameas(p,"H2"))$tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)$(Sw * assuming here that h2 production and use in H2_CT can be temporally asynchronous * that is, the hydrogen does not need to produced in the same hour it is consumed by h2-ct's - + sum{(i,v,r,h)$[valgen(i,v,r,t)$h2_ct(i)], + + sum{(i,v,r,h)$[valgen(i,v,r,t)$h2_ct(i)$h_rep(h)], GEN(i,v,r,h,t) * hours(h) * h2_ct_intensity * heat_rate(i,v,r,t) } ; @@ -3054,7 +3090,7 @@ eq_h2_demand(p,t)$[(sameas(p,"H2"))$tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)$(Sw * H2 demand balance; regional and by timeslice w/ H2 transport network and storage. * Active only when Sw_H2=2 [tonne/hour] eq_h2_demand_regional(r,h,t) - $[tmodel(t)$(Sw_H2=2)$(yeart(t)>=Sw_H2_Demand_Start)$hours(h)].. + $[tmodel(t)$(Sw_H2=2)$(yeart(t)>=h2_demand_start)$h_rep(h)].. * endogenous supply of hydrogen sum{(i,v,p)$[h2(i)$valcap(i,v,r,t)$i_p(i,p)], @@ -3083,11 +3119,11 @@ eq_h2_demand_regional(r,h,t) * --------------------------------------------------------------------------- eq_h2_transport_caplimit(r,rr,h,t)$[h2_routes(r,rr)$(Sw_H2=2) - $tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)].. + $tmodel(t)$(yeart(t)>=h2_demand_start)].. *capacity computed as cumulative investments of h2 pipelines up to the current year sum{tt$[(yeart(tt)<=yeart(t))$(tmodel(tt) or tfix(tt)) - $(yeart(tt)>=Sw_H2_Demand_Start)], + $(yeart(tt)>=h2_demand_start)], H2_TRANSPORT_INV(r,rr,tt)$h2_routes_inv(r,rr) + H2_TRANSPORT_INV(rr,r,tt)$h2_routes_inv(rr,r) } @@ -3101,7 +3137,7 @@ eq_h2_transport_caplimit(r,rr,h,t)$[h2_routes(r,rr)$(Sw_H2=2) * link H2 storage level between timeslices of actual periods, or hours when running a chronological year eq_h2_storage_level(h2_stor,r,actualszn,h,t) - $[tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)$(Sw_H2_StorTimestep=2) + $[tmodel(t)$(yeart(t)>=h2_demand_start)$(Sw_H2_StorTimestep=2) $h2_stor_r(h2_stor,r)$(Sw_H2=2)$h_actualszn(h,actualszn)].. 
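The annual H2 balance above converts H2-CT generation into hydrogen consumption through the heat rate and H2 intensity; a minimal sketch of that conversion, with illustrative names and assumed units (MWh, MMBtu/MWh, tonne/MMBtu):

    # Sketch of the national annual hydrogen balance. All names and units are
    # assumptions for illustration, not the model's parameter definitions.
    def h2_balance_ok(production_tonnes, exogenous_demand_tonnes,
                      h2_ct_gen_mwh, heat_rate_mmbtu_per_mwh, h2_tonne_per_mmbtu):
        h2_for_ct = h2_ct_gen_mwh * heat_rate_mmbtu_per_mwh * h2_tonne_per_mmbtu
        return production_tonnes >= exogenous_demand_tonnes + h2_for_ct

    # 1 TWh of H2-CT generation at 10 MMBtu/MWh and 0.0075 tonne/MMBtu
    # consumes 75,000 tonnes of hydrogen:
    print(h2_balance_ok(200_000, 100_000, 1_000_000, 10, 0.0075))  # True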
*[plus] H2 storage level in next timeslice @@ -3121,7 +3157,7 @@ eq_h2_storage_level(h2_stor,r,actualszn,h,t) * link H2 storage level between seasons eq_h2_storage_level_szn(h2_stor,r,actualszn,t) - $[tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)$(Sw_H2_StorTimestep=1) + $[tmodel(t)$(yeart(t)>=h2_demand_start)$(Sw_H2_StorTimestep=1) $h2_stor_r(h2_stor,r)$(Sw_H2=2)].. *[plus] H2 storage level at start of next season @@ -3143,7 +3179,7 @@ eq_h2_storage_level_szn(h2_stor,r,actualszn,t) * H2 storage capacity [tonnes] eq_h2_storage_capacity(h2_stor,r,t) $[tmodel(t)$(Sw_H2=2) - $h2_stor_r(h2_stor,r)$(yeart(t)>=Sw_H2_Demand_Start)].. + $h2_stor_r(h2_stor,r)$(yeart(t)>=h2_demand_start)].. * [tonnes] sum{tt$[(yeart(tt)<=yeart(t))$(tmodel(tt) or tfix(tt))], @@ -3178,8 +3214,8 @@ eq_h2_storage_flowlimit(h2_stor,r,h,t) $[tmodel(t) $(Sw_H2=2) $h2_stor_r(h2_stor,r) - $(yeart(t)>=Sw_H2_Demand_Start) - $hours(h)].. + $(yeart(t)>=h2_demand_start) + $h_rep(h)].. *storage capacity computed as cumulative investments of H2 storage up to the current year *H2 storage costs estimated for a fixed duration, so using this to link storage capacity and injection rates @@ -3200,7 +3236,7 @@ eq_h2_storage_flowlimit(h2_stor,r,h,t) * total level of H2 storage cannot exceed storage investment for all days * [tonnes] eq_h2_storage_caplimit(h2_stor,r,actualszn,h,t) - $[tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)$(Sw_H2_StorTimestep=2) + $[tmodel(t)$(yeart(t)>=h2_demand_start)$(Sw_H2_StorTimestep=2) $h2_stor_r(h2_stor,r)$(Sw_H2=2)$h_actualszn(h,actualszn)].. * total storage investment [tonnes] @@ -3217,7 +3253,7 @@ eq_h2_storage_caplimit(h2_stor,r,actualszn,h,t) * total level of H2 storage at the beginning of the day cannot exceed storage investment * [tonnes] eq_h2_storage_caplimit_szn(h2_stor,r,actualszn,t) - $[tmodel(t)$(yeart(t)>=Sw_H2_Demand_Start)$(Sw_H2_StorTimestep=1) + $[tmodel(t)$(yeart(t)>=h2_demand_start)$(Sw_H2_StorTimestep=1) $h2_stor_r(h2_stor,r)$(Sw_H2=2)].. * total storage investment [tonnes] @@ -3240,7 +3276,7 @@ eq_co2_capture(r,h,t) $[tmodel(t) $Sw_CO2_Detail $(yeart(t)>=co2_detail_startyr) - $hours(h)].. + $h_rep(h)].. CO2_CAPTURED(r,h,t) @@ -3329,7 +3365,9 @@ eq_co2_cumul_limit(cs,t)$[tmodel(t)$Sw_CO2_Detail$(yeart(t)>=co2_detail_startyr) =g= *cumulative amount stored over time - sum{(r,h,tt)$[(yeart(tt)<=yeart(t))$(tmodel(tt) or tfix(tt))$(yeart(tt)>=co2_detail_startyr)$r_cs(r,cs)], + sum{(r,h,tt) + $[(yeart(tt)<=yeart(t))$(tmodel(tt) or tfix(tt))$(yeart(tt)>=co2_detail_startyr) + $r_cs(r,cs)$h_rep(h)], yearweight(tt) * hours(h) * CO2_STORED(r,cs,h,tt) } ; * --------------------------------------------------------------------------- @@ -3346,13 +3384,13 @@ eq_ccsflex_byp_ccsenergy_limit(i,v,r,h,t)$[tmodel(t)$valgen(i,v,r,t)$ccsflex_byp * --------------------------------------------------------------------------- -eq_ccsflex_sto_ccsenergy_limit_szn(i,v,r,szn,t)$[tmodel(t)$valgen(i,v,r,t)$ccsflex_sto(i)$Sw_CCSFLEX_STO].. +eq_ccsflex_sto_ccsenergy_limit_szn(i,v,r,szn,t)$[tmodel(t)$valgen(i,v,r,t)$ccsflex_sto(i)$szn_rep(szn)$Sw_CCSFLEX_STO].. sum{h$h_szn(h,szn), hours(h) * CCSFLEX_POW(i,v,r,h,t)} =l= ccsflex_powlim(i,t) * sum{h$h_szn(h,szn), hours(h) * (GEN(i,v,r,h,t) + CCSFLEX_POW(i,v,r,h,t))} ; * --------------------------------------------------------------------------- -eq_ccsflex_sto_ccsenergy_balance(i,v,r,szn,t)$[valgen(i,v,r,t)$ccsflex_sto(i)$tmodel(t)$Sw_CCSFLEX_STO$(Sw_CCSFLEX_STO_LEVEL=0)].. 
+eq_ccsflex_sto_ccsenergy_balance(i,v,r,szn,t)$[valgen(i,v,r,t)$ccsflex_sto(i)$tmodel(t)$szn_rep(szn)$Sw_CCSFLEX_STO$(Sw_CCSFLEX_STO_LEVEL=0)].. sum{h$h_szn(h,szn), hours(h) * CCSFLEX_POWREQ (i,v,r,h,t) } =e= sum{h$h_szn(h,szn), hours(h) * CCSFLEX_POW(i,v,r,h,t) } ; ; @@ -3373,7 +3411,7 @@ eq_ccsflex_sto_storage_level(i,v,r,h,t)$[valgen(i,v,r,t)$ccsflex_sto(i)$tmodel(t + ccsflex_sto_storage_eff(i,t) * hours_daily(h) * CCSFLEX_POWREQ(i,v,r,h,t) *[minus] storage discharge -*exclude hybrid PV+Battery because GEN refers to output from both the PV and the battery +*exclude hybrid PV+battery because GEN refers to output from both the PV and the battery - hours_daily(h) * CCSFLEX_POW(i,v,r,h,t) ; @@ -3389,4 +3427,4 @@ eq_ccsflex_sto_storage_level_max(i,v,r,h,t)$[valgen(i,v,r,t)$valcap(i,v,r,t)$ccs CCSFLEX_STO_STORAGE_LEVEL(i,v,r,h,t) ; -* --------------------------------------------------------------------------- \ No newline at end of file +* --------------------------------------------------------------------------- diff --git a/c_supplyobjective.gms b/c_supplyobjective.gms index 87a1d59..ce53e79 100644 --- a/c_supplyobjective.gms +++ b/c_supplyobjective.gms @@ -146,17 +146,17 @@ eq_Objfn_op(t)$tmodel(t).. pvf_onm(t) * ( * --- variable O&M costs--- -* all technologies except hybrid PV+battery and DAC - sum{(i,v,r,h)$[valgen(i,v,r,t)$cost_vom(i,v,r,t)$(not pvb(i))], +* all technologies except hybrid plant and DAC + sum{(i,v,r,h)$[valgen(i,v,r,t)$cost_vom(i,v,r,t)$(not storage_hybrid(i)$(not csp(i)))], hours(h) * cost_vom(i,v,r,t) * GEN(i,v,r,h,t) } -* hybrid PV+battery (PV) - + sum{(i,v,r,h)$[valgen(i,v,r,t)$cost_vom_pvb_p(i,v,r,t)$pvb(i)], - hours(h) * cost_vom_pvb_p(i,v,r,t) * GEN_PVB_P(i,v,r,h,t) }$Sw_PVB +* hybrid plant (plant) + + sum{(i,v,r,h)$[valgen(i,v,r,t)$cost_vom_pvb_p(i,v,r,t)$storage_hybrid(i)$(not csp(i))], + hours(h) * cost_vom_pvb_p(i,v,r,t) * GEN_PLANT(i,v,r,h,t) }$Sw_HybridPlant -* hybrid PV+battery (Battery) - + sum{(i,v,r,h)$[valgen(i,v,r,t)$cost_vom_pvb_b(i,v,r,t)$pvb(i)], - hours(h) * cost_vom_pvb_b(i,v,r,t) * GEN_PVB_B(i,v,r,h,t) }$Sw_PVB +* hybrid plant (Battery) + + sum{(i,v,r,h)$[valgen(i,v,r,t)$cost_vom_pvb_b(i,v,r,t)$storage_hybrid(i)$(not csp(i))], + hours(h) * cost_vom_pvb_b(i,v,r,t) * GEN_STORAGE(i,v,r,h,t) }$Sw_HybridPlant * --- fixed O&M costs--- * generation @@ -200,18 +200,22 @@ eq_Objfn_op(t)$tmodel(t).. } * ---operating reserve costs--- - + sum{(i,v,r,h,ortype)$[Sw_OpRes$valgen(i,v,r,t)$cost_opres(i,ortype,t)$opres_model(ortype)$opres_h(h)], - hours(h) * cost_opres(i,ortype,t) * OpRes(ortype,i,v,r,h,t) } + + sum{(i,v,r,h,ortype)$[Sw_OpRes$valgen(i,v,r,t)$cost_opres(i,ortype,t)$reserve_frac(i,ortype)$opres_model(ortype)$opres_h(h)], + hours(h) * cost_opres(i,ortype,t) * OPRES(ortype,i,v,r,h,t) } -* --- cost of coal, nuclear, and other fuels (except coal used for cofiring)--- -* includes H2 fuel costs when using exogenous fuel price (Sw_H2 = 0 and Sw_H2CT = 1) - + sum{(i,v,r,h)$[valgen(i,v,r,t)$(not gas(i))$heat_rate(i,v,r,t) - $(not bio(i))$(not cofire(i))], + +* --- cost of coal, nuclear, and other fixed-price fuels (except coal used for cofiring), +* plus cost of H2 fuel when using fixed price (Sw_H2=0) or during stress periods. +* When using endogenous H2 price (Sw_H2=1 or Sw_H2=2), H2 fuel cost is captured elsewhere +* via the capex + opex costs of H2 production and its associated electricity demand. 
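The nested $ condition in the fuel-cost term that follows is easier to read as a boolean; a short sketch of the intended logic (illustrative names, not model symbols):

    # Sketch of when the exogenous fuel price applies in this operating-cost term
    # (in addition to the existing not-gas/not-bio/not-cofire filters).
    def include_fuel_cost(is_h2_ct, sw_h2, is_stress_period):
        if not is_h2_ct:
            return True  # non-H2-CT fixed-price fuels keep the existing treatment
        return sw_h2 == 0 or is_stress_period  # H2-CT: fixed H2 price or stress hours

    print(include_fuel_cost(True, sw_h2=2, is_stress_period=False))  # False
    print(include_fuel_cost(True, sw_h2=2, is_stress_period=True))   # True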
+ + sum{(i,v,r,h)$[valgen(i,v,r,t)$heat_rate(i,v,r,t) + $(not gas(i))$(not bio(i))$(not cofire(i)) + $((not h2_ct(i)) or h2_ct(i)$[(Sw_H2=0) or h_stress(h)])], hours(h) * heat_rate(i,v,r,t) * fuel_price(i,r,t) * GEN(i,v,r,h,t) } * --- startup/ramping costs - + sum{(i,v,r,h,hh)$[Sw_StartCost$startcost(i)$numhours_nexth(h,hh)$valgen(i,v,r,t)], - startcost(i) * numhours_nexth(h,hh) * RAMPUP(i,v,r,h,hh,t) } + + sum{(i,r,h,hh)$[Sw_StartCost$startcost(i)$numhours_nexth(h,hh)$valgen_irt(i,r,t)], + startcost(i) * numhours_nexth(h,hh) * RAMPUP(i,r,h,hh,t) } * --cofire coal consumption--- * cofire bio consumption already accounted for in accounting of BIOUSED @@ -230,7 +234,7 @@ eq_Objfn_op(t)$tmodel(t).. hours(h) * dac_gas_cons_rate("dac_gas",v,t) * PRODUCE("DAC","dac_gas",v,r,h,t) }$Sw_DAC_Gas *Sw_GasCurve = 0 (census division supply curves natural gas prices) - + sum{(cendiv,gb), sum{h,hours(h) * GASUSED(cendiv,gb,h,t) } + + sum{(cendiv,gb), sum{h, hours(h) * GASUSED(cendiv,gb,h,t) } * gasprice(cendiv,gb,t) }$(Sw_GasCurve = 0) @@ -279,7 +283,8 @@ eq_Objfn_op(t)$tmodel(t).. hours(h) * PRODUCE(p,i,v,r,h,t) * CO2_storage_cost }$[Sw_DAC$(not Sw_CO2_Detail)] * ---State RPS alternative compliance payments--- - + sum{(RPSCat,st)$(stfeas(st) or sameas(st,"voluntary")), acp_price(st,t) * ACP_PURCHASES(RPSCat,st,t) + + sum{(RPSCat,st)$[(stfeas(st) or sameas(st,"voluntary"))$RecPerc(RPSCat,st,t)$(not acp_disallowed(st,RPSCat))], + acp_price(st,t) * ACP_PURCHASES(RPSCat,st,t) }$[(yeart(t)>=RPS_StartYear)$Sw_StateRPS] * --- revenues from purchases of curtailed VRE--- @@ -289,7 +294,7 @@ eq_Objfn_op(t)$tmodel(t).. + sum{(r,h)$[(yeart(t)= Sw_H2_Demand_Start)$cap_exist_ir(i,r)] = +fuel_price_filt(i,r)$[Sw_H2$h2_ct(i)$(sum{t$tcur(t),yeart(t) } >= h2_demand_start)$cap_exist_ir(i,r)] = sum{t$tcur(t), (1 / cost_scale) * (1 / pvf_onm(t)) * h2_ct_intensity * ( eq_h2_demand.m('h2',t)$[Sw_H2=1] @@ -357,8 +357,6 @@ execute_unload 'ReEDS_Augur%ds%augur_data%ds%reeds_data_%cur_year%.gdx' degrade_annual dr1 dr2 - dr_inc - dr_dec evmc_shape evmc_storage evmc_shape_gen diff --git a/d_solve_iterate.py b/d_solve_iterate.py index 5452215..4dc0011 100644 --- a/d_solve_iterate.py +++ b/d_solve_iterate.py @@ -143,7 +143,7 @@ def main(casepath, t, overwrite=False): ).columns.astype(int).values tprev = {**{years[0]:years[0]}, **dict(zip(years[1:], years))} - if not int(sw['keep_g00_files']) and (min(years) < t): + if ((not int(sw['keep_g00_files'])) and (not int(sw['debug']))) and (min(years) < t): g00files = glob(os.path.join(casepath, 'g00files', f'*{tprev[t]}i*.g00')) for i in g00files: os.remove(i) diff --git a/d_solveoneyear.gms b/d_solveoneyear.gms index 550e68d..8ccc3e4 100755 --- a/d_solveoneyear.gms +++ b/d_solveoneyear.gms @@ -134,10 +134,10 @@ sdbin_size(ccreg,ccseason,sdbin,t)$tload(t) = sdbin_size_load(ccreg,ccseason,sdb * --- Assign hybrid PV+battery capacity credit --- $ontext Limit the capacity credit of hybrid PV such that the total capacity credit from the PV and the battery do not exceed the inverter limit. - Example: PV = 130 MWdc, Battery = 65MW, Inverter = 100 MW (PVdc/Battery=0.5; PVdc/INVac=1.3) - Assuming the capacity credit of the Battery is 65MW, then capacity credit of the PV is limited to 35MW or 0.269 (35MW/130MW) on a relative basis. 
- Max capacity credit PV [MWac/MWdc] = (Inverter - Battery capcity credit) / PV_dc - = (P_dc / ILR - P_dc * BCR) / PV_dc + Example: PV = 130 MWdc, Battery = 65 MW, Inverter = 100 MW (PVdc/Battery=0.5; PVdc/INVac=1.3) + Assuming the capacity credit of the Battery is 65 MW, then capacity credit of the PV is limited to 35 MW or 0.269 (35MW/130MW) on a relative basis. + Max capacity credit PV [MWac/MWdc] = (Inverter - Battery capacity credit) / PV_dc + = (PV_dc / ILR - PV_dc * BCR) / PV_dc = 1/ILR - BCR $offtext * marginal capacity credit @@ -339,7 +339,62 @@ $endif.debug * ------------------------------ solve ReEDSmodel minimizing z using lp ; +tsolved(t)$tmodel(t) = yes ; + +if(Sw_NewValCapShrink = 1, + +* remove newv dimensions for technologies that do not have capacity in this year +* and if it is not a vintage you can build in future years +* and if the plant has not been upgraded +* note since we're applying this only to new techs the upgrades portion +* needs to be present in combination with the ability to be built in future periods +* said differently, we want to make sure the vintage cannot be built in future periods, +* it hasn't been built yet, and it has no associated upgraded units +* here the second year index tracks which year has just solved + valcap_remove(i,v,r,t,"%cur_year%")$[newv(v)$valcap(i,v,r,t)$ivt(i,v,"%cur_year%") +* if there is no capacity.. + $(not CAP.l(i,v,r,"%cur_year%")) +* if you have not invested in it.. + $(not sum(tt$[(yeart(tt)<=%cur_year%)], INV.l(i,v,r,tt) )) +* if you cannot invest in the ivt combo in future years.. + $(not sum{tt$[tt.val>%cur_year%],ivt(i,v,tt)}) + $(not sum(tt$[valinv(i,v,r,tt)$(yeart(tt)>%cur_year%)],1)) +* if it has not been upgraded.. +* note the newv condition above allows for the capacity equations +* of motion to still function - this would/does not work for initv vintanges without additional work + $(not sum{(tt,ii)$[tsolved(tt)$upgrade_from(ii,i)$valcap(ii,v,r,tt)], + UPGRADES.l(ii,v,r,tt)}) + ] = yes ; + valcap(i,v,r,t)$valcap_remove(i,v,r,t,"%cur_year%") = no ; + valgen(i,v,r,t)$valcap_remove(i,v,r,t,"%cur_year%") = no ; + valinv(i,v,r,t)$valcap_remove(i,v,r,t,"%cur_year%") = no ; + inv_cond(i,v,r,t,"%cur_year%")$valcap_remove(i,v,r,t,"%cur_year%") = no ; + valcap_irt(i,r,t) = sum{v, valcap(i,v,r,t) } ; + valcap_iv(i,v)$sum{(r,t)$tmodel_new(t), valcap(i,v,r,t) } = yes ; + valcap_i(i)$sum{v, valcap_iv(i,v) } = yes ; + valcap_ivr(i,v,r)$sum{t, valcap(i,v,r,t) } = yes ; + valgen_irt(i,r,t) = sum{v, valgen(i,v,r,t) } ; + valinv_irt(i,r,t) = sum{v, valinv(i,v,r,t) } ; + valinv_tg(st,tg,t)$sum{(i,r)$[tg_i(tg,i)$r_st(r,st)], valinv_irt(i,r,t) } = yes ; + +) ; + + +$ontext +* the removal of these computed sets would be more complete +* but the vintage-agnostic approach does not allow for their proper representation +* -however- these only apply as constraint generation conditions and +* will not create free/unbounded variables within the model + + valinv_irt(i,r,t)$[valinv_irt(i,r,t)$ + sum{v, valcap_remove(i,v,r,t,"%cur_year%")}] = no ; + + valinv_tg(st,tg,t)$[valinv_tg(st,tg,t) + $sum{(i,v,r)$[tg_i(tg,i)$r_st(r,st)], + valcap_remove(i,v,r,t,"%cur_year%")}] = no ; + +$offtext *record objective function values right after solve z_rep(t)$tmodel(t) = Z.l ; z_rep_inv(t)$tmodel(t) = Z_inv.l(t) ; diff --git a/e_report.gms b/e_report.gms index 0d015d9..87e20c2 100755 --- a/e_report.gms +++ b/e_report.gms @@ -373,7 +373,7 @@ reqt_quant('state_rps',RPSCat,r,'ann',t)$tmodel_new(t) = + ( sum{(i,v)$[valgen(i,v,r,t)$(not 
storage_standalone(i))], GEN.l(i,v,r,h,t) - (distloss * GEN.l(i,v,r,h,t))$(distpv(i) or dupv(i)) - - (STORAGE_IN_PVB_G.l(i,v,r,h,t) * storage_eff_pvb_g(i,t))$[pvb(i)$Sw_PVB] } + - (STORAGE_IN_GRID.l(i,v,r,h,t) * storage_eff_pvb_g(i,t))$[storage_hybrid(i)$(not csp(i))$Sw_HybridPlant] } - can_exports_h(r,h,t)$[(Sw_Canada=1)$sameas(RPSCat,"CES")] )$(RecStyle(st,RPSCat)=2) )} ; @@ -421,6 +421,7 @@ tran_hurdle_cost_ann(r,rr,trtype,t)$[tmodel_new(t)$routes(r,rr,trtype,t)$cost_hu *======================================== rec_outputs(RPSCat,i,st,ast,t)$[stfeas(st)$(stfeas(ast) or sameas(ast,"voluntary"))$tmodel_new(t)] = RECS.l(RPSCat,i,st,ast,t) ; +acp_purchases_out(rpscat,st,t) = ACP_PURCHASES.l(RPSCat,st,t) ; ptc_out(i,v,t)$[tmodel_new(t)$ptc_value_scaled(i,v,t)] = ptc_value_scaled(i,v,t) * tc_phaseout_mult(i,v,t) ; *======================================== @@ -595,11 +596,11 @@ gen_ann(i,r,t)$tmodel_new(t) = sum{h, gen_h(i,r,h,t) * hours(h) } ; * Report generation without the charging, DR, and production included as above gen_ivrt(i,v,r,t)$valgen(i,v,r,t) = sum{h, GEN.l(i,v,r,h,t) * hours(h) } ; -gen_ivrt_uncurt(i,v,r,t)$[(vre(i) or pvb(i))$valgen(i,v,r,t)] = +gen_ivrt_uncurt(i,v,r,t)$[(vre(i) or storage_hybrid(i)$(not csp(i)))$valgen(i,v,r,t)] = sum{h, m_cf(i,v,r,h,t) * CAP.l(i,v,r,t) * hours(h) } ; -stor_inout(i,v,r,t,"in")$[valgen(i,v,r,t)$storage(i)$[not pvb(i)]] = sum{h, STORAGE_IN.l(i,v,r,h,t) * hours(h) } ; +stor_inout(i,v,r,t,"in")$[valgen(i,v,r,t)$storage(i)$[not storage_hybrid(i)$(not csp(i))]] = sum{h, STORAGE_IN.l(i,v,r,h,t) * hours(h) } ; stor_inout(i,v,r,t,"out")$[valgen(i,v,r,t)$storage(i)] = gen_ivrt(i,v,r,t) ; -stor_in(i,v,r,h,t)$[storage(i)$valgen(i,v,r,t)$(not pvb(i))] = STORAGE_IN.l(i,v,r,h,t) ; +stor_in(i,v,r,h,t)$[storage(i)$valgen(i,v,r,t)$(not storage_hybrid(i)$(not csp(i)))] = STORAGE_IN.l(i,v,r,h,t) ; stor_out(i,v,r,h,t)$[storage(i)$valgen(i,v,r,t)] = GEN.l(i,v,r,h,t) ; stor_level(i,v,r,h,t)$[valgen(i,v,r,t)$storage(i)] = STORAGE_LEVEL.l(i,v,r,h,t) ; @@ -642,17 +643,17 @@ opres_trade(ortype,r,rr,t)$[opres_routes(r,rr,t)$tmodel_new(t)] = * LOSSES AND CURTAILMENT *========================= -gen_new_uncurt(i,r,h,t)$[(vre(i) or pvb(i))$valcap_irt(i,r,t)] = +gen_new_uncurt(i,r,h,t)$[(vre(i) or storage_hybrid(i)$(not csp(i)))$valcap_irt(i,r,t)] = sum{v$valinv(i,v,r,t), (INV.l(i,v,r,t) + INV_REFURB.l(i,v,r,t)) * m_cf(i,v,r,h,t) * hours(h) } ; * Formulation follows eq_curt_gen_balance(r,h,t); since it uses =g= there may be extra curtailment * beyond CURT.l(r,h,t) so we recalculate as (availability - generation - operating reserves) curt_h(r,h,t)$tmodel_new(t) = - sum{(i,v)$[valcap(i,v,r,t)$(vre(i) or pvb(i))], + sum{(i,v)$[valcap(i,v,r,t)$(vre(i) or storage_hybrid(i)$(not csp(i)))], m_cf(i,v,r,h,t) * CAP.l(i,v,r,t) } - sum{(i,v)$[valgen(i,v,r,t)$vre(i)], GEN.l(i,v,r,h,t) } - - sum{(i,v)$[valgen(i,v,r,t)$pvb(i)], GEN_PVB_P.l(i,v,r,h,t) }$Sw_PVB + - sum{(i,v)$[valgen(i,v,r,t)$storage_hybrid(i)$(not csp(i))], GEN_PLANT.l(i,v,r,h,t) }$Sw_HybridPlant - sum{(ortype,i,v)$[Sw_OpRes$opres_h(h)$reserve_frac(i,ortype)$valgen(i,v,r,t)$vre(i)], OPRES.l(ortype,i,v,r,h,t) } ; @@ -674,9 +675,9 @@ curt_rate_tech(i,r,t)$[tmodel_new(t)$vre(i)$(gen_ann(i,r,t) + curt_tech(i,r,t))] curt_rate(t) $[tmodel_new(t) - $(sum{(i,r)$[vre(i) or pvb(i)], gen_ann(i,r,t) } + sum{r, curt_ann(r,t) })] + $(sum{(i,r)$[vre(i) or storage_hybrid(i)$(not csp(i))], gen_ann(i,r,t) } + sum{r, curt_ann(r,t) })] = sum{r, curt_ann(r,t) } - / (sum{(i,r)$[vre(i) or pvb(i)], gen_ann(i,r,t) } + sum{r, curt_ann(r,t) }) ; + / 
(sum{(i,r)$[vre(i) or storage_hybrid(i)$(not csp(i))], gen_ann(i,r,t) } + sum{r, curt_ann(r,t) }) ; losses_ann('storage',t)$tmodel_new(t) = sum{(i,v,r,h)$[valcap(i,v,r,t)$storage_standalone(i)], STORAGE_IN.l(i,v,r,h,t) * hours(h) } - sum{(i,v,r,h)$[valcap(i,v,r,t)$storage_standalone(i)], GEN.l(i,v,r,h,t) * hours(h) } ; @@ -801,33 +802,33 @@ cap_sdbin_out(i,r,ccseason,sdbin,t)$valcap_irt(i,r,t) = sum{v, CAP_SDBIN.l(i,v,r * energy capacity of storage stor_energy_cap(i,v,r,t)$[tmodel_new(t)$valcap(i,v,r,t)] = - storage_duration(i) * CAP.l(i,v,r,t) * (1$CSP_Storage(i) + 1$psh(i) + bcr(i)$[battery(i) or pvb(i)]) ; + storage_duration(i) * CAP.l(i,v,r,t) * (1$CSP_Storage(i) + 1$psh(i) + bcr(i)$[battery(i) or storage_hybrid(i)$(not csp(i))]) ; *================================== * CAPACITY CREDIT AND FIRM CAPACITY *================================== cc_all_out(i,v,r,ccseason,t)$tmodel_new(t) = - cc_int(i,v,r,ccseason,t)$[(vre(i) or csp(i) or storage(i) or pvb(i))$valcap(i,v,r,t)] + - m_cc_mar(i,r,ccseason,t)$[(vre(i) or csp(i) or storage(i) or pvb(i))$valinv(i,v,r,t)]+ + cc_int(i,v,r,ccseason,t)$[(vre(i) or csp(i) or storage(i) or storage_hybrid(i)$(not csp(i)))$valcap(i,v,r,t)] + + m_cc_mar(i,r,ccseason,t)$[(vre(i) or csp(i) or storage(i) or storage_hybrid(i)$(not csp(i)))$valinv(i,v,r,t)]+ m_cc_dr(i,r,ccseason,t)$[demand_flex(i)$valinv(i,v,r,t)] ; -cap_new_cc(i,r,ccseason,t)$[(vre(i) or storage(i) or pvb(i))$valcap_irt(i,r,t)] = sum{v$ivt(i,v,t),cap_new_ivrt(i,v,r,t) } ; +cap_new_cc(i,r,ccseason,t)$[(vre(i) or storage(i) or storage_hybrid(i)$(not csp(i)))$valcap_irt(i,r,t)] = sum{v$ivt(i,v,t),cap_new_ivrt(i,v,r,t) } ; cc_new(i,r,ccseason,t)$[valcap_irt(i,r,t)$cap_new_cc(i,r,ccseason,t)] = sum{v$ivt(i,v,t), cc_all_out(i,v,r,ccseason,t) } ; cap_firm(i,r,ccseason,t)$[valcap_irt(i,r,t)$[not consume(i)]$tmodel_new(t)] = - sum{v$[(not vre(i))$(not hydro(i))$(not storage(i))$(not pvb(i))$(not demand_flex(i))$valcap(i,v,r,t)], + sum{v$[(not vre(i))$(not hydro(i))$(not storage(i))$(not storage_hybrid(i)$(not csp(i)))$(not demand_flex(i))$valcap(i,v,r,t)], CAP.l(i,v,r,t) * (1 + ccseason_cap_frac_delta(i,v,r,ccseason,t)) } + cc_old(i,r,ccseason,t) - + sum{v$[(vre(i) or csp(i) or pvb(i))$valinv(i,v,r,t)], + + sum{v$[(vre(i) or csp(i) or storage_hybrid(i)$(not csp(i)))$valinv(i,v,r,t)], m_cc_mar(i,r,ccseason,t) * (INV.l(i,v,r,t) + INV_REFURB.l(i,v,r,t)$[refurbtech(i)$Sw_Refurb]) } - + sum{v$[(vre(i) or csp(i) or pvb(i))$valcap(i,v,r,t)], + + sum{v$[(vre(i) or csp(i) or storage_hybrid(i)$(not csp(i)))$valcap(i,v,r,t)], cc_int(i,v,r,ccseason,t) * CAP.l(i,v,r,t) } + sum{v$demand_flex(i), m_cc_dr(i,r,ccseason,t) * CAP.l(i,v,r,t) } - + cc_excess(i,r,ccseason,t)$[(vre(i) or csp(i) or pvb(i))] + + cc_excess(i,r,ccseason,t)$[(vre(i) or csp(i) or storage_hybrid(i)$(not csp(i)))] + sum{(v,h)$[hydro_nd(i)$valgen(i,v,r,t)$h_ccseason_prm(h,ccseason)], GEN.l(i,v,r,h,t) } + sum{v$[hydro_d(i)$valcap(i,v,r,t)], @@ -1155,8 +1156,8 @@ systemcost_techba('op_ptc_payments_negative',i,r,t)$tmodel_new(t) = * Startup/ramping costs systemcost_techba('op_startcost',i,r,t)$[tmodel_new(t)$Sw_StartCost$startcost(i)] = - sum{(v,h,hh)$[numhours_nexth(h,hh)$valgen(i,v,r,t)], - startcost(i) * numhours_nexth(h,hh) * RAMPUP.l(i,v,r,h,hh,t) } + sum{(h,hh)$[numhours_nexth(h,hh)$valgen_irt(i,r,t)], + startcost(i) * numhours_nexth(h,hh) * RAMPUP.l(i,r,h,hh,t) } ; diff --git a/e_report_params.csv b/e_report_params.csv index f1a16b0..4df96d0 100644 --- a/e_report_params.csv +++ b/e_report_params.csv @@ -1,6 +1,7 @@ # Full-line and line-end comments 
can be indicated with # (but use the comment column when possible) # This parameter list is in alphabetical order - please add new entries that way param,units,comment,reeds2x,output_rename,input +"acp_purchases_out(rpscat,st,t)",MWh,Annual alternative compliance credits from the variable ACP_PURCHASES "avg_cf(i,v,r,t)",frac,Annual average capacity factor for rsc technologies,,, "avg_avail(i,v)",frac,Annual average avail factor,,, "bioused_out(bioclass,r,t)",dry tons (imperial),biomass used by class in each model region (-> bioused.csv),,, diff --git a/hourlize/inputs/configs/srun_template.sh b/hourlize/inputs/configs/srun_template.sh index 34db949..8c1e171 100644 --- a/hourlize/inputs/configs/srun_template.sh +++ b/hourlize/inputs/configs/srun_template.sh @@ -4,4 +4,4 @@ #SBATCH --ntasks-per-node=1 #SBATCH --mail-user=[your email] #SBATCH --mail-type=BEGIN,END,FAIL -#SBATCH --mem=250000 # RAM in MB +#SBATCH --mem=248000 # RAM in MB diff --git a/hourlize/load.py b/hourlize/load.py index 58debfa..bbc604e 100644 --- a/hourlize/load.py +++ b/hourlize/load.py @@ -153,7 +153,7 @@ def process_hourly(df_hr_input, load_source_timezone, paths, hourly_out_years, s print('Splicing in default load before ' + str(use_default_before_yr)) #Read in hierarchy to map census division / state to BA df_hier = pd.read_csv(os.path.join(outpath, 'inputs', 'hierarchy.csv')) - df_hier = df_hier.rename(columns= {'*county':'county', 'ba' : 'r'}) + df_hier = df_hier.rename(columns= {'ba' : 'r'}) #Read in load multipliers df_loadgrowth = pd.read_csv(cf.aeo_default) if 'cendiv' in df_loadgrowth.columns: diff --git a/hourlize/resource.py b/hourlize/resource.py index 32d3545..3075497 100644 --- a/hourlize/resource.py +++ b/hourlize/resource.py @@ -588,8 +588,10 @@ def map_supplycurve( logging.getLogger(i).setLevel(logging.CRITICAL) import pandas as pd import matplotlib.pyplot as plt - import os, site + import os + import site import geopandas as gpd + import cmocean os.environ['PROJ_NETWORK'] = 'OFF' site.addsitedir(os.path.join(reedspath,'postprocessing')) @@ -599,7 +601,7 @@ def map_supplycurve( #%%### Format inputs if not cm: - cmap = plt.cm.gist_earth_r + cmap = cmocean.cm.rain else: cmap = cm ms = {'wind-ofs':1.75, 'wind-ons':2.65, 'upv':2.65}[tech] diff --git a/hourlize/tests/data/r2r_expanded/reeds/inputs_case/switches.csv b/hourlize/tests/data/r2r_expanded/reeds/inputs_case/switches.csv index 532219e..f37ff9f 100755 --- a/hourlize/tests/data/r2r_expanded/reeds/inputs_case/switches.csv +++ b/hourlize/tests/data/r2r_expanded/reeds/inputs_case/switches.csv @@ -166,8 +166,8 @@ GSw_PVB,0 GSw_PVB_BIR,25_50_100 GSw_PVB_Dur,4 GSw_PVB_ILR,140_220_220 -GSw_PVB_ITC_Qual_Award,0 -GSw_PVB_ITC_Qual_Constraint,0 +GSw_PVB_BatteryITC,0 +GSw_PVB_Charge_Constraint,0 GSw_PVB_Types,1 GSw_RGGI,1 GSw_RampRate_Gen,0 diff --git a/hourlize/tests/data/r2r_integration/reeds/inputs_case/switches.csv b/hourlize/tests/data/r2r_integration/reeds/inputs_case/switches.csv index c37e038..b4910da 100755 --- a/hourlize/tests/data/r2r_integration/reeds/inputs_case/switches.csv +++ b/hourlize/tests/data/r2r_integration/reeds/inputs_case/switches.csv @@ -146,8 +146,8 @@ GSw_PVB,0 GSw_PVB_BIR,25_50_100 GSw_PVB_Dur,4 GSw_PVB_ILR,140_220_220 -GSw_PVB_ITC_Qual_Award,0 -GSw_PVB_ITC_Qual_Constraint,0 +GSw_PVB_BatteryITC,0 +GSw_PVB_Charge_Constraint,0 GSw_PVB_Types,1 GSw_RECT,1 GSw_RECTupgrade,1 diff --git a/input_processing/LDC_prep.py b/input_processing/LDC_prep.py index 5250985..a5799b6 100644 --- a/input_processing/LDC_prep.py +++ 
b/input_processing/LDC_prep.py @@ -271,7 +271,7 @@ def main(reeds_path, inputs_case): hierarchy_original = ( pd.read_csv(os.path.join(reeds_path, 'inputs', 'hierarchy.csv')) .rename(columns={'ba':'r'}) - .drop(['*county','county_name'], axis=1).drop_duplicates() + .drop(['county','county_name'], axis=1).drop_duplicates() .set_index('r') ) ### Add ccreg column with the desired hierarchy level @@ -382,9 +382,9 @@ def main(reeds_path, inputs_case): for pvb_type in GSw_PVB_Types: ilr = int(pvb_ilr['pvb{}'.format(pvb_type)] * 100) ### UPV uses ILR = 1.3, so use its profile if ILR = 1.3 - infile = 'upv' if ilr == 130 else f'upv_{ilr}AC' + infile = f'upv-{GSw_SitingUPV}_ba' if ilr == 130 else f'upv_{ilr}AC_ba-reference' df_pvb[pvb_type] = read_file( - os.path.join(path_variability, 'multi_year', 'f{infile}-{GSw_SitingUPV}')) + os.path.join(path_variability, 'multi_year', infile)) df_pvb[pvb_type].columns = [f'pvb{pvb_type}_{c}' for c in df_pvb[pvb_type].columns] df_pvb[pvb_type].index = df_upv.index.copy() @@ -651,6 +651,8 @@ def main(reeds_path, inputs_case): for fips, row in fracdata.iterrows()}, axis=1, ) + # Filter by regions again for cases when only a subset of a model balancing area is represented + load_eastern = load_eastern.loc[:,load_eastern.columns.isin(val_r_all)].copy() #%% Calculate coincident peak demand at different levels for convenience later _peakload = {} diff --git a/input_processing/WriteHintage.py index cfd059a..b28b99d 100644 --- a/input_processing/WriteHintage.py +++ b/input_processing/WriteHintage.py @@ -519,6 +519,9 @@ def main(reeds_path, inputs_case): dpv = dpv[['Summer.capacity']] # Put back in original format dpv = dpv.reset_index().sort_values(['r','year']) + + # Filter by regions again for cases when only a subset of a model balancing area is represented + dpv = dpv.loc[dpv['r'].isin(val_r_all)] elif agglevel in ['state','aggreg']: # or any other spatial resolution above 'BA' dpv['r'] = dpv['r'].map(r_ba) dpv = dpv.groupby(['r','year'], as_index=False).sum() diff --git a/input_processing/aggregate_regions.py index f1b2246..134aead 100644 --- a/input_processing/aggregate_regions.py +++ b/input_processing/aggregate_regions.py @@ -41,10 +41,9 @@ inputs_case = os.path.join(args.inputs_case) # #%%## Settings for testing -# reeds_path = os.path.expanduser('~/github2/ReEDS-2.0') -# reeds_path = os.getcwd() +# reeds_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # inputs_case = os.path.join( -# reeds_path,'runs','nd16_ND','inputs_case') +# reeds_path,'runs','v20240416_compareM0_USA_agg','inputs_case') #%% Settings for debugging ### Set debug == True to copy the original files to a new folder (inputs_case_original). 
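The region filters added here (and in the aggregate_regions changes below) all trim a table to the regions actually present in the run; a minimal sketch of the pattern, assuming val_r_all has already been read as a plain list of region names:

    import pandas as pd

    # Sketch of the val_r_all filtering pattern: keep only in-run regions, whether
    # regions appear as profile columns or as a column of values. Illustrative data.
    val_r_all = ['p1', 'p2']

    profiles = pd.DataFrame({'p1': [1.0], 'p2': [2.0], 'p3': [3.0]})
    profiles = profiles.loc[:, profiles.columns.isin(val_r_all)].copy()  # drops p3

    capacity = pd.DataFrame({'r': ['p1', 'p3'], 'MW': [10, 20]})
    capacity = capacity.loc[capacity['r'].isin(val_r_all)]  # drops the p3 row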
@@ -81,6 +80,13 @@ agglevel = pd.read_csv( os.path.join(inputs_case,'agglevels.csv')).squeeze(1).tolist()[0] +# Regions present in the current run +val_r_all = sorted( + pd.read_csv( + os.path.join(inputs_case, 'val_r_all.csv'), header=None, + ).squeeze(1).tolist() +) + #%% =========================================================================== ### --- FUNCTIONS AND DICTIONARIES --- ### =========================================================================== @@ -191,6 +197,14 @@ def logprint(filepath, message): ## Add it to rscweight_nobin rscweight_nobin = rscweight.groupby(['i','r'], as_index=False).sum() rscweight_nobin = pd.concat([rscweight_nobin, distpvcap.reset_index().assign(i='distpv')], axis=0) + ## Remove duplicate CSP values for different solar multiples + rscweight_csp = rscweight_nobin.copy() + rscweight_csp.i.replace( + {f'csp{i+1}_{c+1}': f'csp_{c+1}' + for i in range(int(sw.GSw_CSP)) + for c in range(int(sw.GSw_NumCSPclasses))}, + inplace=True) + rscweight_csp.drop_duplicates(['i','r','rscbin'], inplace=True) #%% Get the mapping to reduced-resolution technology classes original_num_classes = {**{'dupv':7}, **{f'csp{i}':12 for i in range(1,5)}} @@ -326,7 +340,7 @@ def logprint(filepath, message): # the case, then skip them. try: dfin = pd.read_csv(os.path.join(inputs_case, filepath), header=header) - except: + except Exception: continue elif filetype == '.h5': dfin = pd.read_hdf(os.path.join(inputs_case, filepath)) @@ -395,7 +409,7 @@ def logprint(filepath, message): ## Turn index into columns .reset_index() ) - + #%% If the file is empty, move on to the next one as there is nothing to aggregate if df.empty: if verbose > 1: @@ -460,7 +474,7 @@ def logprint(filepath, message): ### Special case: If calculating capacity credit by r, replace ccreg with r if sw['capcredit_hierarchy_level'] == 'r': df1 = df1.assign(ccreg=df1.r).drop_duplicates() - elif aggfunc == 'recf': + elif aggfunc in ['recf','csp']: ### Special case: Region is embedded in the 'resources' column as {tech}_{region} col2r = dict(zip(columns, [c.split('_')[-1] for c in columns])) col2i = dict(zip(columns, ['_'.join(c.split('_')[:-1]) for c in columns])) @@ -468,14 +482,18 @@ def logprint(filepath, message): df1['r'] = df1[region_col].map(col2r) df1['i'] = df1[region_col].map(col2i) ## Get capacities - df1 = df1.merge(rscweight_nobin, on=['r','i'], how='left') + df1 = df1.merge( + (rscweight_csp if aggfunc == 'csp' else rscweight_nobin), + on=['r','i'], how='left', + ) ## Similar procedure as above for aggfunc == 'sc_cat' df1['i'] = df1['i'].map(lambda x: new_classes.get(x,x)) df1 = ( - df1.assign(r=df1.r.map(r_ba)) - .assign(cap_times_cf=df1.cf*df1.MW) - .groupby(['index','i','r']).sum() - ) + df1 + .assign(r=df1.r.map(r_ba)) + .assign(cap_times_cf=df1.cf*df1.MW) + .groupby(['index','i','r']).sum() + ) df1.cf = df1.cap_times_cf / df1.MW df1 = df1.rename(columns={'cf':'value'}).reset_index() ## Remake the resources (column names) with new regions @@ -483,7 +501,7 @@ def logprint(filepath, message): df1 = df1.set_index(['index','wide'])[['value']].astype(np.float16) elif aggfunc in ['sum','mean','first','min']: df1 = df1.groupby(fix_cols+region_cols).agg(aggfunc) - + ### Disaggregation methods -------------------------------------------------------------------- elif aggfunc == 'uniform': for rcol in region_cols: @@ -498,7 +516,6 @@ def logprint(filepath, message): else: df1.set_index(region_cols,inplace=True) elif aggfunc in ['population','geosize','translinesize','hydroexist']: - if 'sc_cat' in columns: # 
Split cap and cost df1_cap = df1[df1['sc_cat']=='cap'] @@ -530,11 +547,10 @@ def logprint(filepath, message): df1_cost.rename(columns={'new_value':valcol,'FIPS':rcol},inplace=True) df1_cost.set_index(df1cols[:-1],inplace=True) df1_cost = df1_cost[[valcol]] - + # Combine cap and cost to get back into original format df1 = pd.concat([df1_cap, df1_cost]) - else: # Disaggregate cap using the selected aggfunc fracdata = disagg_data[aggfunc] @@ -557,7 +573,14 @@ def logprint(filepath, message): else: raise ValueError(f'Invalid choice of aggfunc: {aggfunc} for {filename}') - + ## Filter by regions again for cases when only a subset of a model balancing area is represented + if agglevel == 'county': + if region_col == '*r,rr': + df1 = df1.loc[df1.index.get_level_values('*r').isin(val_r_all)] + df1 = df1.loc[df1.index.get_level_values('rr').isin(val_r_all)] + else: + df1 = df1.loc[df1.index.get_level_values(region_col).isin(val_r_all)] + #%%################################ ### Put back in original format ### diff --git a/input_processing/calc_financial_inputs.py b/input_processing/calc_financial_inputs.py index 65564ac..991b62e 100644 --- a/input_processing/calc_financial_inputs.py +++ b/input_processing/calc_financial_inputs.py @@ -15,7 +15,7 @@ ### --- FUNCTIONS --- ### =========================================================================== -def calc_financial_inputs(reeds_path, inputs_case): +def calc_financial_inputs(inputs_case): """ Write the following files to runs/{batch_case}/inputs_case/: - ivt.csv @@ -49,36 +49,30 @@ def calc_financial_inputs(reeds_path, inputs_case): os.path.join(inputs_case,'scalars.csv'), header=None, names=['scalar','value','comment'], index_col='scalar')['value'] - input_dir = os.path.join(reeds_path, 'inputs') - #%% Import some general data and maps # Import inflation (which includes both historical and future inflation). # Used for adjusting currency inputs to the specified dollar year, and financial calculations. - inflation_df = pd.read_csv(os.path.join( - input_dir, 'financials', 'inflation_%s.csv' % sw['inflation_suffix'])) + inflation_df = pd.read_csv(os.path.join(inputs_case,'inflation.csv')) # Import tech groups. 
Used to expand data inputs # (e.g., 'UPV' expands to all of the upv subclasses, like upv_1, upv_2, etc) - tech_groups = sFuncs.import_tech_groups(os.path.join(input_dir, 'tech-subset-table.csv')) + tech_groups = sFuncs.import_tech_groups(os.path.join(inputs_case, 'tech-subset-table.csv')) # Set up scen_settings object scen_settings = sFuncs.scen_settings( - dollar_year=int(sw['dollar_year']), tech_groups=tech_groups, input_dir=input_dir, + dollar_year=int(sw['dollar_year']), tech_groups=tech_groups, inputs_case=inputs_case, sw=sw) #%% Ingest data, determine what regions have been specified, and build df_ivt # Build df_ivt (for calculating various parameters at subscript [i,v,t]) - techs = pd.read_csv( - os.path.join(input_dir, 'techs', 'techs_%s.csv' % sw['techs_suffix'])) + techs = pd.read_csv(os.path.join(inputs_case,'techs.csv')) techs = sFuncs.expand_GAMS_tech_groups(techs) vintage_definition = pd.read_csv(os.path.join(inputs_case, 'ivt.csv')).rename(columns={'Unnamed: 0':'i'}) - annual_degrade = pd.read_csv( - os.path.join(input_dir,'degradation', - 'degradation_annual_%s.csv' % sw['degrade_suffix']), + annual_degrade = pd.read_csv(os.path.join(inputs_case,'degradation_annual.csv'), header=None, names=['i','annual_degradation']) annual_degrade = sFuncs.expand_GAMS_tech_groups(annual_degrade) ### Assign the PV+battery values to values for standalone batteries @@ -89,8 +83,6 @@ def calc_financial_inputs(reeds_path, inputs_case): years, modeled_years, year_map = sFuncs.ingest_years( inputs_case, sw['sys_eval_years'], sw['endyear']) - val_r_all = pd.read_csv(os.path.join(inputs_case,'val_r_all.csv'), header = None)[0].tolist() - df_ivt = sFuncs.build_dfs(years, techs, vintage_definition, year_map) print('df_ivt created for', inputs_case) @@ -127,7 +119,7 @@ def calc_financial_inputs(reeds_path, inputs_case): dfin=financials_tech, tech_to_copy='battery_{}'.format(scen_settings.sw['GSw_PVB_Dur'])) # If the battery in PV+B gets the ITC, it gets 5-year MACRS depreciation as well - if float(scen_settings.sw['GSw_PVB_ITC_Qual_Award']) >= 0.75: + if float(scen_settings.sw['GSw_PVB_BatteryITC']) >= 0.75: financials_tech.loc[ financials_tech.i.str.startswith('pvb') & (financials_tech.country == 'usa'), 'depreciation_sch' @@ -213,12 +205,8 @@ def calc_financial_inputs(reeds_path, inputs_case): #%% # Import schedules for financial calculations - construction_schedules = pd.read_csv(os.path.join( - input_dir, 'financials', - 'construction_schedules_%s.csv' % sw['construction_schedules_suffix'])) - depreciation_schedules = pd.read_csv(os.path.join( - input_dir, 'financials', - 'depreciation_schedules_%s.csv' % sw['depreciation_schedules_suffix'])) + construction_schedules = pd.read_csv(os.path.join(inputs_case,'construction_schedules.csv')) + depreciation_schedules = pd.read_csv(os.path.join(inputs_case,'depreciation_schedules.csv')) ### Calculate financial multipliers print('Calculating financial multipliers for', inputs_case, '...') @@ -228,11 +216,7 @@ def calc_financial_inputs(reeds_path, inputs_case): #%%### Calculate financial multipliers for transmission ### Load transmission data - dftrans = pd.read_csv( - os.path.join( - input_dir, 'financials', - 'financials_transmission_{}.csv'.format(sw['financials_trans_suffix'])), - ) + dftrans = pd.read_csv(os.path.join(inputs_case,'financials_transmission.csv')) ### Get transmission capital recovery period (CRP) from input scalars dftrans['eval_period'] = int(scalars['trans_crp']) ### Get online year @@ -274,11 +258,7 @@ def 
calc_financial_inputs(reeds_path, inputs_case): #%%### Calculate financial multipliers for hydrogen network investments ### Load hydroge data - dfhydrogen = pd.read_csv( - os.path.join( - input_dir, 'financials', - 'financials_hydrogen.csv'), - ) + dfhydrogen = pd.read_csv(os.path.join(inputs_case,'financials_hydrogen.csv')) ### Get hydrogen capital recovery period (CRP) from input scalars # note that pipelines and compressors have different lifetimes dfhydrogen['eval_period_pipeline'] = int(scalars['h2_crp_pipeline']) @@ -330,19 +310,22 @@ def calc_financial_inputs(reeds_path, inputs_case): #%% # Import regional capital cost multipliers, create multipliers for csp configurations reg_cap_cost_mult = sFuncs.import_data( - file_root=f'reg_cap_cost_mult', file_suffix=sw['reg_cap_cost_mult_suffix'], + file_root='reg_cap_cost_mult', file_suffix=sw['reg_cap_cost_mult_suffix'], indices=['i','r'], scen_settings=scen_settings) - + # Apply the values for standalone batteries to PV+B batteries reg_cap_cost_mult = sFuncs.append_pvb_parameters( dfin=reg_cap_cost_mult, tech_to_copy=f'battery_{scen_settings.sw["GSw_PVB_Dur"]}') + # Initialize a copy of reg_cap_cost_mult that only include CSP data reg_cap_cost_mult_csp = reg_cap_cost_mult[reg_cap_cost_mult['i'].str.contains('csp1_')].copy() # Read in techs subset table to determine number of csp configurations - tech_subset_table = pd.read_csv(os.path.join(input_dir, 'tech-subset-table.csv')) + tech_subset_table = pd.read_csv(os.path.join(inputs_case, 'tech-subset-table.csv')) csp_configs = int(len(tech_subset_table.query('CSP == "YES" and STORAGE == "YES"'))) del tech_subset_table + # Iteratively copy and concat CSP data to reg_cap_cost_mult dataframe for each additional + # CSP configurations for i in range(2, csp_configs + 1): configuration = 'csp' + str(i) mult_temp = reg_cap_cost_mult_csp.copy() @@ -351,9 +334,8 @@ def calc_financial_inputs(reeds_path, inputs_case): del mult_temp del reg_cap_cost_mult_csp - # Trim down to just the techs and regions in this run + # Trim down to just the techs in this run reg_cap_cost_mult = reg_cap_cost_mult[reg_cap_cost_mult['i'].isin(list(techs['i']))] - reg_cap_cost_mult = reg_cap_cost_mult[reg_cap_cost_mult['r'].isin(val_r_all)] #%% Before writing outputs, change "x" to "newx" in [v] @@ -435,26 +417,6 @@ def calc_financial_inputs(reeds_path, inputs_case): # pvf_cap (used in both seq and int modes) sFuncs.inv_param_exporter(df_ivt, modeled_years, 'pvf_capital', ['t'], 'pvf_cap', inputs_case) - # Copy input files into inputs_case - depreciation_schedules.to_csv( - os.path.join(inputs_case, 'depreciation_schedules.csv'), index=False) - inflation_df.to_csv(os.path.join(inputs_case, 'inflation.csv'), index=False) - - # Copy construction_times into inputs_case - pd.read_csv( - os.path.join(input_dir, 'financials', - 'construction_times_%s.csv' % sw['construction_times_suffix']) - ).to_csv( - os.path.join(inputs_case, 'construction_times.csv'), index=False) - - # Copy tc_phaseout_schedule into inputs_case - pd.read_csv( - os.path.join(input_dir, 'financials', - 'tc_phaseout_schedule_%s.csv' % sw['GSw_TCPhaseout_schedule']) - ).to_csv( - os.path.join(inputs_case, 'tc_phaseout_schedule.csv'), index=False) - - # Output some values used in the retail rate module retail_eval_period = df_ivt[['i', 't', 'eval_period']].drop_duplicates(['i', 't']) retail_depreciation_sch = df_ivt[ @@ -495,7 +457,7 @@ def calc_financial_inputs(reeds_path, inputs_case): #%% Run it tic = datetime.datetime.now() - calc_financial_inputs(reeds_path, 
inputs_case) + calc_financial_inputs(inputs_case) toc(tic=tic, year=0, process='input_processing/calc_financial_inputs.py', path=os.path.join(inputs_case,'..')) diff --git a/input_processing/copy_files.py b/input_processing/copy_files.py index 2ee1cc4..07c4ee1 100644 --- a/input_processing/copy_files.py +++ b/input_processing/copy_files.py @@ -20,7 +20,6 @@ import datetime tic = datetime.datetime.now() - #%% Parse arguments parser = argparse.ArgumentParser(description="Copy files needed for this run") parser.add_argument('reeds_path', help='ReEDS directory') @@ -32,8 +31,24 @@ # #%% Settings for testing ### # reeds_path = os.getcwd() -# reeds_path = os.path.join('E:\\','Vincent','ReEDS-2.0_SpFl') -# inputs_case = os.path.join(reeds_path,'runs','mergetest_Western_state','inputs_case','') +# reeds_path = os.path.join('/Users','jcarag','ReEDS','ReEDS-2.0') +# inputs_case = os.path.join(reeds_path,'runs','Mar29_megacopyfilestest_Pacific','inputs_case','') + +#%% Set up logger +log = makelog(scriptname=__file__, logpath=os.path.join(inputs_case,'..','gamslog.txt')) +print('Starting copy_files.py') + +#%% Inputs from switches +sw = pd.read_csv( + os.path.join(inputs_case, 'switches.csv'), header=None, index_col=0).squeeze(1) +# Create switch dictionary that has certain switches evaluated for the values ReEDS actually uses +sw_expanded = {**sw, **{'osprey_num_years':str(len(sw['osprey_years'].split('_')))}} + +solveyears = pd.read_csv( + os.path.join(reeds_path,'inputs','modeledyears.csv'), + usecols=[sw['yearset_suffix']], +).squeeze(1).dropna().astype(int).tolist() +solveyears = [y for y in solveyears if y <= int(sw['endyear'])] #%% Additional inputs casedir = os.path.dirname(inputs_case) @@ -98,12 +113,11 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): return dfparams -#%% Set up logger -log = makelog(scriptname=__file__, logpath=os.path.join(inputs_case,'..','gamslog.txt')) -print('Starting copy_files.py') - -#%% Identify files that have a region index versus those that do not +#%% =========================================================================== +### --- PROCEDURE --- +### =========================================================================== +### Identify files that have a region index versus those that do not runfiles = ( pd.read_csv( os.path.join(reeds_path, 'runfiles.csv'), @@ -131,33 +145,36 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): & (runfiles['post_copy'] != 1)] ) -#%% Copy relevant files from runfiles.csv that do not include regions +#%%########################################## +# -- Copy non-region-indexed Files -- # +############################################# +print('Copying non-region-indexed files') for i,row in nonregionFiles.iterrows(): if row['filepath'].split('/')[0] in ['inputs','postprocessing']: dir_dst = inputs_case else: - dir_dst = casedir + dir_dst = casedir + + # Replace '{switchnames}' in src_file with corresponding switch values src_file = os.path.join(reeds_path, row['filepath']) + src_file = src_file.format(**sw_expanded) + + # Copy the desired file from ReEDS inputs folder to the inputs_case folder if (os.path.exists(src_file)) and (row['filename']!='rev_paths.csv'): - shutil.copy(src_file, dir_dst) + # Special Case: Values in load_multiplier.csv need to be rounded prior to copy + if row['filename'] == 'load_multiplier.csv': + pd.read_csv( + src_file + ).round(6).to_csv(os.path.join(inputs_case,'load_multiplier.csv'),index=False) + else: + shutil.copy(src_file, os.path.join(dir_dst,row['filename'])) #%% Rewrite the 
scalar and switches tables as GAMS-readable definitions - scalar_csv_to_txt(os.path.join(inputs_case,'scalars.csv')) scalar_csv_to_txt(os.path.join(inputs_case,'gswitches.csv')) ### Do the same for the e_report parameters param_csv_to_txt(os.path.join(inputs_case,'..','e_report_params.csv')) -#%% Inputs from switches -sw = pd.read_csv( - os.path.join(inputs_case, 'switches.csv'), header=None, index_col=0).squeeze(1) - -solveyears = pd.read_csv( - os.path.join(reeds_path,'inputs','modeledyears.csv'), - usecols=[sw['yearset_suffix']], -).squeeze(1).dropna().astype(int).tolist() -solveyears = [y for y in solveyears if y <= int(sw['endyear'])] - #%%########################### # -- Region Mapping -- # ############################## @@ -168,10 +185,7 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): '' if (sw['GSw_HierarchyFile'] == 'default') else '_'+sw['GSw_HierarchyFile'])) ) -# Remove asterisk from the first column -new_column_name = hierarchy.columns[0].replace('*', '') -hierarchy.rename(columns={hierarchy.columns[0]:new_column_name}, - inplace = True) + if not NARIS: hierarchy = hierarchy.loc[hierarchy.country.str.lower()=='usa'].copy() @@ -319,68 +333,7 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): # Export filtered r_cs to r_cs.csv r_cs.to_csv(os.path.join(inputs_case, 'r_cs.csv'), index=False) -#%%##################################### -# -- Write run-specific files -- # -######################################## - -shutil.copy(os.path.join(reeds_path,'inputs','capacitydata', - f'wind-ons_prescribed_builds_{sw.GSw_SitingWindOns}_{lvl}.csv'), - os.path.join(inputs_case,'wind-ons_prescribed_builds.csv')) -shutil.copy(os.path.join(reeds_path,'inputs','capacitydata', - f'wind-ofs_prescribed_builds_{sw.GSw_SitingWindOfs}_{lvl}.csv'), - os.path.join(inputs_case,'wind-ofs_prescribed_builds.csv')) - -### Specific versions of files ### - -osprey_num_years = len(sw['osprey_years'].split('_')) -shutil.copy( - os.path.join( - reeds_path,'inputs','variability',f'index_hr_map_{osprey_num_years}.csv'), - os.path.join(inputs_case,'index_hr_map.csv') -) -shutil.copy( - os.path.join( - reeds_path,'inputs','variability',f'd_szn_{osprey_num_years}.csv'), - os.path.join(inputs_case,'d_szn.csv') -) -shutil.copy( - os.path.join( - reeds_path,'inputs','state_policies',f'offshore_req_{sw["GSw_OfsWindForceScen"]}.csv'), - os.path.join(inputs_case,'offshore_req.csv') -) -shutil.copy( - os.path.join( - reeds_path,'inputs','consume',f'dac_gas_{sw["GSw_DAC_Gas_Case"]}.csv'), - os.path.join(inputs_case,'dac_gas.csv') -) -shutil.copy( - os.path.join( - reeds_path,'inputs','carbonconstraints',f'capture_rates_{sw["GSw_CCS_Rate"]}.csv'), - os.path.join(inputs_case,'capture_rates.csv') -) -shutil.copy( - os.path.join( - reeds_path,'inputs','capacitydata', - f'ReEDS_generator_database_final_{sw["unitdata"]}.csv'), - os.path.join(inputs_case,'unitdata.csv') -) -shutil.copy( - os.path.join( - reeds_path,'inputs','transmission',f'r_rr_adj_{lvl}.csv'), - os.path.join(inputs_case,'r_rr_adj.csv') -) -for f in ['distPVcap','distPVCF_hourly']: - shutil.copy( - os.path.join( - reeds_path,'inputs','dGen_Model_Inputs','{s}','{f}_{s}.csv').format( - f=f, s=sw['distpvscen']), - os.path.join(inputs_case, f'{f}.csv') - ) -pd.read_csv( - os.path.join(reeds_path,'inputs','loaddata',f'demand_{sw["demandscen"]}.csv'), -).round(6).to_csv(os.path.join(inputs_case,'load_multiplier.csv'),index=False) - -### Hourly RE profiles +#%% Hourly RE profiles # The BA-level files are part of the repository, so only need to check for 
# these files if running at the county-level if agglevel == 'county': @@ -487,13 +440,15 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): ### Files defined from case inputs ### pd.DataFrame( - {'*pvb_type': [f'pvb{i}' for i in range(1,4)], - 'ilr': [np.around(float(c) / 100, 2) for c in sw['GSw_PVB_ILR'].split('_')]} + {'*pvb_type': [f'pvb{i}' for i in sw['GSw_PVB_Types'].split('_')], + 'ilr': [np.around(float(c) / 100, 2) for c in sw['GSw_PVB_ILR'].split('_') + ][0:len(sw['GSw_PVB_Types'].split('_'))]} ).to_csv(os.path.join(inputs_case, 'pvb_ilr.csv'), index=False) pd.DataFrame( - {'*pvb_type': [f'pvb{i}' for i in range(1,4)], - 'bir': [np.around(float(c) / 100, 2) for c in sw['GSw_PVB_BIR'].split('_')]} + {'*pvb_type': [f'pvb{i}' for i in sw['GSw_PVB_Types'].split('_')], + 'bir': [np.around(float(c) / 100, 2) for c in sw['GSw_PVB_BIR'].split('_') + ][0:len(sw['GSw_PVB_Types'].split('_'))]} ).to_csv(os.path.join(inputs_case, 'pvb_bir.csv'), index=False) ### Constant value if input is float, otherwise named profile @@ -526,11 +481,6 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): )[sw['GSw_CarbTaxOption']].rename_axis('*t').round(2).to_csv( os.path.join(inputs_case,'co2_tax.csv') ) -pd.read_csv( - os.path.join(reeds_path,'inputs','reserves','prm_annual.csv'), index_col=['*nercr','t'], -)[sw['GSw_PRM_scenario']].round(5).to_csv( - os.path.join(inputs_case,'prm_annual.csv') -) pd.DataFrame(columns=solveyears).to_csv( os.path.join(inputs_case,'modeledyears.csv'), index=False) pd.read_csv( @@ -599,7 +549,7 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): #%%######################################################## # -- Filter and copy data for files with regions -- # ########################################################### - +print('Copying region-indexed files: filtering for valid regions') for i, row in regionFiles.iterrows(): filepath = row['filepath'] filename = row['filename'] @@ -614,6 +564,11 @@ def param_csv_to_txt(path_to_param_csv, writelist=True): else: full_path = os.path.join(reeds_path,filepath) + # Add 'lvl' to the sw_expanded dictionary + sw_expanded = {**sw_expanded, **{'lvl':lvl}} + # Replace '{switchnames}' in src_file with corresponding switch values + full_path = full_path.format(**sw_expanded) + # Read if file that needs filtered if filetype == '.h5': df = pd.read_hdf(full_path) diff --git a/input_processing/fuelcostprep.py b/input_processing/fuelcostprep.py index e9bbae7..fb27bfd 100644 --- a/input_processing/fuelcostprep.py +++ b/input_processing/fuelcostprep.py @@ -23,8 +23,8 @@ #%% Parse arguments parser = argparse.ArgumentParser(description="""This file organizes fuel cost data by techonology""") -parser.add_argument("reeds_path", help="ReEDS directory") -parser.add_argument("inputs_case", help="output directory") +parser.add_argument("reeds_path", help='ReEDS-2.0 directory') +parser.add_argument("inputs_case", help='ReEDS-2.0/runs/{case}/inputs_case directory') args = parser.parse_args() reeds_path = args.reeds_path @@ -46,20 +46,11 @@ # Load valid regions val_r = pd.read_csv( os.path.join(inputs_case, 'val_r.csv'), header=None).squeeze(1).tolist() -val_r_all = pd.read_csv( - os.path.join(inputs_case, 'val_r_all.csv'), header=None).squeeze(1).tolist() - -# filter natural gas supply curve dimensions -# on val_cendiv, written by copy_files.py -val_cendiv = pd.read_csv(os.path.join(inputs_case,"val_cendiv.csv"), - header=None).squeeze(1).tolist() - -input_dir = os.path.join(reeds_path,'inputs','fuelprices','') r_cendiv = 
pd.read_csv(os.path.join(inputs_case,"r_cendiv.csv")) -dollaryear = pd.read_csv(os.path.join(input_dir, "dollaryear.csv")) -deflator = pd.read_csv(os.path.join(reeds_path,'inputs','deflator.csv')) +dollaryear = pd.read_csv(os.path.join(inputs_case, "dollaryear_fuel.csv")) +deflator = pd.read_csv(os.path.join(inputs_case,'deflator.csv')) deflator.columns = ["Dollar.Year","Deflator"] dollaryear = dollaryear.merge(deflator,on="Dollar.Year",how="left") @@ -70,7 +61,7 @@ #################### # -- Coal -- # #################### -coal = pd.read_csv(os.path.join(input_dir, f'coal_{sw.coalscen}.csv')) +coal = pd.read_csv(os.path.join(inputs_case, 'coal_price.csv')) coal = coal.melt(id_vars = ['year']).rename(columns={'variable':'cendiv'}) # Adjust prices to 2004$ @@ -80,13 +71,12 @@ coal = coal.merge(r_cendiv,on='cendiv',how='left') coal = coal.drop('cendiv', axis=1) coal = coal[['year','r','value']].rename(columns={'year':'t','value':'coal'}) -coal = coal.loc[coal['r'].isin(val_r_all)] coal.coal = coal.coal.round(6) ####################### # -- Uranium -- # ####################### -uranium = pd.read_csv(os.path.join(input_dir, f'uranium_{sw.uraniumscen}.csv')) +uranium = pd.read_csv(os.path.join(inputs_case, 'uranium_price.csv')) # Adjust prices to 2004$ deflate = dollaryear.loc[dollaryear['Scenario'] == sw.uraniumscen,'Deflator'].values[0] @@ -99,7 +89,7 @@ # -- H2-CT -- # ##################### # note that these fuel inputs are not used when H2 production is run endogenously in ReEDS (GSw_H2 > 0) -h2ct = pd.read_csv(os.path.join(input_dir, f'h2-ct_{sw.h2ctfuelscen}.csv'), index_col='year') +h2ct = pd.read_csv(os.path.join(inputs_case, 'hydrogen_price.csv'), index_col='year') #Adjust prices to 2004$ deflate = dollaryear.loc[dollaryear['Scenario'] == sw.h2ctfuelscen,'Deflator'].squeeze() @@ -116,7 +106,7 @@ # -- Natural Gas -- # ########################### -ngprice = pd.read_csv(os.path.join(input_dir,f'ng_{sw.ngscen}.csv')) +ngprice = pd.read_csv(os.path.join(inputs_case,'ng_price_cendiv.csv')) ngprice = ngprice.melt(id_vars=['year']).rename(columns={'variable':'cendiv'}) # Adjust prices to 2004$ @@ -128,11 +118,10 @@ ngprice_cendiv = ngprice_cendiv.pivot_table(index='cendiv',columns='year',values='value') ngprice_cendiv = ngprice_cendiv.round(6) -# Map cenus regions to BAs +# Map cenus regions to model regions ngprice = ngprice.merge(r_cendiv,on='cendiv',how='left') ngprice = ngprice.drop('cendiv', axis=1) ngprice = ngprice[['year','r','value']].rename(columns={'year':'t','value':'naturalgas'}) -ngprice = ngprice.loc[ngprice['r'].isin(val_r_all)] ngprice.naturalgas = ngprice.naturalgas.round(6) # Combine all fuel data @@ -145,25 +134,21 @@ ### Natural Gas Demand Calculations ### # Natural Gas demand -ngdemand = pd.read_csv(os.path.join(input_dir,f'ng_demand_{sw.ngscen}.csv')) +ngdemand = pd.read_csv(os.path.join(inputs_case,'ng_demand_elec.csv')) ngdemand.index = ngdemand.year ngdemand = ngdemand.drop('year', axis=1) ngdemand = ngdemand.transpose() ngdemand = ngdemand.round(6) # Total Natural Gas demand -ngtotdemand = pd.read_csv(os.path.join(input_dir, f'ng_tot_demand_{sw.ngscen}.csv')) +ngtotdemand = pd.read_csv(os.path.join(inputs_case, 'ng_demand_tot.csv')) ngtotdemand.index = ngtotdemand.year ngtotdemand = ngtotdemand.drop('year', axis=1) ngtotdemand = ngtotdemand.transpose() ngtotdemand = ngtotdemand.round(6) ### Natural Gas Alphas (already in 2004$) -if sw.GSw_GasSector == 'electric_sector': - alpha = pd.read_csv(os.path.join(input_dir, f'alpha_{sw.ngscen}.csv')) -else: - alpha = 
pd.read_csv( - os.path.join(input_dir, f'alpha_{sw.ngscen}_{sw.GSw_EFS1_AllYearLoad}.csv')) +alpha = pd.read_csv(os.path.join(inputs_case, 'alpha.csv')) alpha = alpha.round(6) #%%################### @@ -171,11 +156,11 @@ ###################### fuel.to_csv(os.path.join(inputs_case,'fprice.csv'),index=False) -ngprice_cendiv.loc[val_cendiv].to_csv(os.path.join(inputs_case,'ng_price_cendiv.csv')) +ngprice_cendiv.to_csv(os.path.join(inputs_case,'ng_price_cendiv.csv')) -ngdemand.loc[val_cendiv].to_csv(os.path.join(inputs_case,'ng_demand_elec.csv')) -ngtotdemand.loc[val_cendiv].to_csv(os.path.join(inputs_case,'ng_demand_tot.csv')) -alpha[['t']+val_cendiv].to_csv(os.path.join(inputs_case,'alpha.csv'),index=False) +ngdemand.to_csv(os.path.join(inputs_case,'ng_demand_elec.csv')) +ngtotdemand.to_csv(os.path.join(inputs_case,'ng_demand_tot.csv')) +alpha.to_csv(os.path.join(inputs_case,'alpha.csv'),index=False) toc(tic=tic, year=0, process='input_processing/fuelcostprep.py', path=os.path.join(inputs_case,'..')) diff --git a/input_processing/hourly_plots.py b/input_processing/hourly_plots.py index c21fca5..8ec6e5e 100644 --- a/input_processing/hourly_plots.py +++ b/input_processing/hourly_plots.py @@ -2,19 +2,22 @@ ### --- IMPORTS --- ### =========================================================================== import os -import sys -import math -import argparse +import site import logging -## Turn off logging for imported packages -for i in ['matplotlib']: - logging.getLogger(i).setLevel(logging.CRITICAL) -import json import pandas as pd import numpy as np -from LDC_prep import read_file +import matplotlib as mpl +import matplotlib.pyplot as plt +from matplotlib import patheffects as pe +import geopandas as gpd +import cmocean + import hourly_repperiods +## Turn off logging for imported packages +for i in ['matplotlib']: + logging.getLogger(i).setLevel(logging.CRITICAL) + #%%################# ### FIXED INPUTS ### interactive = False @@ -26,9 +29,6 @@ def plot_unclustered_periods(profiles, sw, reeds_path, figpath): """ """ - import matplotlib.pyplot as plt - import matplotlib as mpl - import site site.addsitedir(os.path.join(reeds_path,'postprocessing')) import plots plots.plotparams() @@ -60,7 +60,8 @@ def plot_unclustered_periods(profiles, sw, reeds_path, figpath): ax[0].set_xlim(0, nhours) plots.despine(ax) plt.savefig(os.path.join(figpath,'profiles-day_hourly-{}.png'.format(label))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() ### Sequential days, unscaled and scaled @@ -77,7 +78,8 @@ def plot_unclustered_periods(profiles, sw, reeds_path, figpath): ax[row].set_title('{} {}'.format(p,r),x=0.01,ha='left',va='top',pad=0) plots.despine(ax) plt.savefig(os.path.join(figpath,'profiles-year_hourly-{}.png'.format(label))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() ### Daily plt.close() @@ -88,7 +90,8 @@ def plot_unclustered_periods(profiles, sw, reeds_path, figpath): ax[row].set_title('{} {}'.format(p,r),x=0.01,ha='left',va='top',pad=0) plots.despine(ax) plt.savefig(os.path.join(figpath,'profiles-year_daily-{}.png'.format(label))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() @@ -96,7 +99,6 @@ def plot_feature_scatter(profiles_fitperiods, reeds_path, figpath): """ """ import matplotlib.pyplot as plt - import matplotlib as mpl import site site.addsitedir(os.path.join(reeds_path,'postprocessing')) import plots @@ -127,92 +129,8 @@ def plot_feature_scatter(profiles_fitperiods, reeds_path, figpath): plots.despine(ax) 
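The fuelcostprep.py hunks above repeatedly apply the same dollar-year adjustment: each price scenario has a native dollar year, and merging with the deflator series yields a multiplier that converts prices to 2004$. A small sketch of that pattern, using made-up scenario names and deflator values:

import pandas as pd

# Invented inputs standing in for dollaryear_fuel.csv and deflator.csv
dollaryear = pd.DataFrame({'Scenario': ['reference'], 'Dollar.Year': [2020]})
deflator = pd.DataFrame({'Dollar.Year': [2004, 2020], 'Deflator': [1.0, 0.72]})

# Attach the deflator for each scenario's native dollar year
dollaryear = dollaryear.merge(deflator, on='Dollar.Year', how='left')
deflate = dollaryear.loc[dollaryear['Scenario'] == 'reference', 'Deflator'].values[0]

# Wide price table -> long format, then convert to 2004$
coal = (
    pd.DataFrame({'year': [2025, 2030], 'NE': [2.1, 2.0]})
    .melt(id_vars=['year']).rename(columns={'variable': 'cendiv'})
)
coal['value'] *= deflate
print(coal)   # prices now expressed in 2004$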
plt.savefig(os.path.join(figpath,'feature_scatter.png')) - if interactive: plt.show() - plt.close() - - -def plot_clustered_days( - profiles_fitperiods_hourly, profiles, rep_periods, - forceperiods, sw, reeds_path, figpath): - """ - """ - import matplotlib.pyplot as plt - import matplotlib as mpl - import site - site.addsitedir(os.path.join(reeds_path,'postprocessing')) - import plots - plots.plotparams() - - ### Input processing - numclusters = int(sw['GSw_HourlyNumClusters']) - centroids = profiles.loc[rep_periods] - properties = profiles_fitperiods_hourly.columns.get_level_values('property').unique() - nhours = (len(profiles_fitperiods_hourly.columns.get_level_values('region').unique()) - * (24 if sw['GSw_HourlyType']=='day' else 120)) - ### Plot it - plt.close() - f,ax = plt.subplots( - len(properties), numclusters+len(forceperiods), - sharey='row', sharex=True, figsize=(nhours/12*numclusters/3,6)) - for row, prop in enumerate(properties): - for col in range(numclusters): - profiles_fitperiods_hourly.loc[:,idx==col,:][prop].T.reset_index(drop=True).plot( - ax=ax[row,col], lw=0.2, legend=False) - centroids[prop].T[col].reset_index(drop=True).plot( - ax=ax[row,col], lw=1.5, ls=':', c='k', legend=False) - profiles_fitperiods_hourly.loc[:,nearest_period[col],:][prop].T.reset_index(drop=True).plot( - ax=ax[row,col], lw=1.5, c='k', legend=False) - for col, period in enumerate(forceperiods): - profiles.loc[:,period,:][prop].T.reset_index(drop=True).plot( - ax=ax[row,numclusters+col], lw=1.5, c='k', legend=False) - ax[0,numclusters+col].set_title('{} (d{})'.format(numclusters+col, period)) - ax[row,0].set_ylabel(prop) - ### Formatting - label = ' | '.join( - profiles.columns.get_level_values('region').drop_duplicates().tolist()) - ax[0,0].annotate(label,(0,1.25), xycoords='axes fraction', fontsize='large',) - ax[0,0].xaxis.set_major_locator(mpl.ticker.MultipleLocator(24)) - ax[0,0].xaxis.set_minor_locator(mpl.ticker.MultipleLocator(6)) - ax[0,0].set_xlim(0, nhours) - for col in range(numclusters): - ax[0,col].set_title('{} ({})'.format(col, pd.Series(idx).value_counts()[col])) - ax[-1,col].tick_params(axis='x',labelsize=9) - for row in range(len(properties)): - ax[row,0].set_ylim(0) - for col in range(numclusters+len(forceperiods)): - for x in np.arange(0,nhours+1,24): - ax[row,col].axvline(x,c='k',ls=':',lw=0.3) - plots.despine(ax) - plt.savefig(os.path.join(figpath,'profiles-day_hourly-clustered.png')) - if interactive: plt.show() - plt.close() - - -def plot_clusters_pca(profiles_fitperiods_hourly, sw, reeds_path, figpath): - """ - """ - import matplotlib.pyplot as plt - import matplotlib as mpl - import site - site.addsitedir(os.path.join(reeds_path,'postprocessing')) - import plots - plots.plotparams() - import sklearn.decomposition - - pca = sklearn.decomposition.PCA(n_components=2) - transformed = pd.DataFrame(pca.fit_transform(profiles_fitperiods_hourly)) - colors = plots.rainbowmapper(range(int(sw['GSw_HourlyNumClusters']))) - - plt.close() - f,ax = plt.subplots() - for y in colors: - ax.scatter( - transformed[0][idx==y], transformed[1][idx==y], - color=colors[y], lw=0, s=10) - plots.despine(ax) - plt.savefig(os.path.join(figpath,'clusters-pca-{}totaldays.png'.format( - sw['GSw_HourlyNumClusters']))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() @@ -221,15 +139,10 @@ def plot_ldc( forceperiods_write, sw, reeds_path, figpath): """ """ - import matplotlib.pyplot as plt - import matplotlib as mpl - import site 
site.addsitedir(os.path.join(reeds_path,'postprocessing')) import plots plots.plotparams() - colors = plots.rainbowmapper(rep_periods, plt.cm.turbo) - ### Get clustered load, repeating representative periods based on how many ### periods they represent numperiods = period_szn.value_counts().rename('numperiods').to_frame() @@ -318,7 +231,8 @@ def plot_ldc( plots.despine(ax) plt.savefig(os.path.join(figpath,'ldc-{}-{}totaldays.png'.format( plotlabel, sw['GSw_HourlyNumClusters']))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() @@ -326,38 +240,38 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): """ """ ### Imports - import matplotlib.pyplot as plt - import matplotlib as mpl - import site - import geopandas as gpd - import shapely site.addsitedir(os.path.join(reeds_path,'postprocessing')) import plots + import reedsplots plots.plotparams() ### Settings cmaps = { - 'cf_full':'turbo', 'cf_hourly':'turbo', 'cf_diff':'bwr', - 'GW_full':'gist_earth_r', 'GW_hourly':'gist_earth_r', - 'GW_diff':'bwr', 'GW_frac':'bwr', 'GW_pct':'bwr', + 'cf_actual':plt.cm.turbo, 'cf_rep':plt.cm.turbo, 'cf_diff':plt.cm.RdBu_r, + 'GW_full':cmocean.cm.rain, 'GW_hourly':cmocean.cm.rain, + 'GW_diff':plt.cm.RdBu_r, 'GW_frac':plt.cm.RdBu_r, 'GW_pct':plt.cm.RdBu_r, } vm = { - 'wind-ons':{'cf_full':(0,0.8),'cf_hourly':(0,0.8),'cf_diff':(-0.05,0.05)}, - 'upv':{'cf_full':(0,0.4),'cf_hourly':(0,0.4),'cf_diff':(-0.05,0.05)}, + 'wind-ons':{'cf_actual':(0,0.6),'cf_rep':(0,0.6),'cf_diff':(-0.05,0.05)}, + 'upv':{'cf_actual':(0,0.3),'cf_rep':(0,0.3),'cf_diff':(-0.05,0.05)}, } vlimload = 0.05 title = ( - 'Algorithm={}, NumClusters={}, RegionLevel={},\n' - 'PeakLevel={}, MinRElevel={}, ClusterWeights={}' + '{}\n' + 'Algorithm={}, NumClusters={}, RegionLevel={}, ClusterWeights={}' ).format( + os.path.abspath(os.path.join(inputs_case,'..')), sw['GSw_HourlyClusterAlgorithm'], sw['GSw_HourlyNumClusters'], sw['GSw_HourlyClusterRegionLevel'], - sw['GSw_HourlyPeakLevel'], sw['GSw_HourlyMinRElevel'], '__'.join(['_'.join([ str(i),str(v)]) for (i,v) in sw['GSw_HourlyClusterWeights'].items()])) techs = ['wind-ons','upv'] - colors = {'cf_full':'k', 'cf_hourly':'C1'} - lss = {'cf_full':':', 'cf_hourly':'-'} - zorders = {'cf_full':10, 'cf_hourly':9} + colors = {'cf_actual':'k', 'cf_rep':'C1'} + lss = {'cf_actual':':', 'cf_rep':'-'} + zorders = {'cf_actual':10, 'cf_rep':9} + + hierarchy = pd.read_csv( + os.path.join(inputs_case, 'hierarchy.csv')).rename(columns={'*r':'r'}).set_index('r') + dfmap = reedsplots.get_dfmap(os.path.abspath(os.path.join(inputs_case,'..'))) ### Get the CF data over all years, take the mean over weather years recf = pd.read_hdf(os.path.join(inputs_case,'recf.h5')) @@ -373,10 +287,6 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): recf.index.map(lambda x: x.year in sw.GSw_HourlyWeatherYears) ].mean() - # ReEDS only supports a single entry for agglevel right now, so use the - # first value from the list (copy_files.py already ensures that only one - # value is present) - # The 'lvl' variable ensures that BA and larger spatial aggregations use BA data and methods agglevel = pd.read_csv( os.path.join(inputs_case, 'agglevels.csv')).squeeze(1).tolist()[0] lvl = 'ba' if agglevel in ['ba','state','aggreg'] else 'county' @@ -404,14 +314,12 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): dfsc['longitude'] = dfsc.sc_point_gid.map(sitemap.longitude) dfsc = plots.df2gdf(dfsc) dfsc['resource'] = dfsc.i + '_' + dfsc.r - dfsc['cf_full'] = dfsc.resource.map(recf) + dfsc['cf_actual'] = 
dfsc.resource.map(recf) ### Get the BA map dfba = gpd.read_file(os.path.join(reeds_path,'inputs','shapefiles','US_PCA')).set_index('rb') dfba['x'] = dfba.geometry.centroid.x dfba['y'] = dfba.geometry.centroid.y - ### Aggregate to states - dfstates = dfba.dissolve('st') ### Get the hourly data hours = pd.read_csv( @@ -425,41 +333,101 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): index=['i','r'],columns='h',values='cf') cf_hourly = ( (cf_hourly * cf_hourly.columns.map(hours)).sum(axis=1) / hours.sum() - ).rename('cf_hourly').reset_index() + ).rename('cf_rep').reset_index() cf_hourly['resource'] = cf_hourly.i + '_' + cf_hourly.r ### Merge with supply curve, take the difference cfmap = dfsc.assign( - cf_hourly=dfsc.resource.map(cf_hourly.set_index('resource').cf_hourly)).loc[tech].copy() - cfmap['cf_diff'] = cfmap.cf_hourly - cfmap.cf_full + cf_rep=dfsc.resource.map(cf_hourly.set_index('resource').cf_rep)).loc[tech].copy() + cfmap['cf_diff'] = cfmap.cf_rep - cfmap.cf_actual + + ### Calculate the difference at different resolutions + levels = ['r', 'st', 'transgrp', 'transreg', 'interconnect', 'country'] + dfdiffs = {} + for col in levels: + if col != 'r': + cfmap[col] = cfmap.r.map(hierarchy[col]) + dfdiffs[col] = dfmap[col].copy() + df = cfmap.copy() + for i in ['cf_actual','cf_rep']: + df['weighted'] = cfmap[i] * cfmap.capacity + dfdiffs[col][i] = ( + df.groupby(col).weighted.sum() / df.groupby(col).capacity.sum() + ) + dfdiffs[col]['cf_diff'] = dfdiffs[col].cf_rep - dfdiffs[col].cf_actual ### Plot the difference map + nrows, ncols, coords = plots.get_coordinates([ + 'cf_actual', 'cf_rep', 'cf_diff', + 'r', 'st', 'transgrp', + 'transreg', 'interconnect', 'country', + ], aspect=1) + plt.close() - f,ax = plt.subplots(1,3,figsize=(13,4),gridspec_kw={'wspace':-0.05}) - for col in range(3): - dfstates.plot(ax=ax[col], facecolor='none', edgecolor='k', lw=0.25, zorder=10000) - for x,col in enumerate(['cf_full','cf_hourly','cf_diff']): + f,ax = plt.subplots( + nrows, ncols, figsize=(14,9), gridspec_kw={'wspace':-0.05, 'hspace':0}, + ) + ## Absolute and site difference + for col in ['cf_actual','cf_rep','cf_diff']: cfmap.plot( - ax=ax[x], column=col, cmap=cmaps[col], - marker='s', markersize=0.4, lw=0, legend=True, - legend_kwds={'shrink':0.75,'orientation':'horizontal', - 'label':'{} {}'.format(tech,col), 'pad':0}, + ax=ax[coords[col]], column=col, cmap=cmaps[col], + marker='s', markersize=0.35, lw=0, + legend=False, vmin=vm[tech][col][0], vmax=vm[tech][col][1], ) - ax[x].axis('off') - ax[0].set_title(title, y=0.95, x=0.05, ha='left', fontsize=10) - plt.savefig(os.path.join(figpath,'cfmap-{}-{}totaldays.png'.format( - tech.replace('-',''), sw['GSw_HourlyNumClusters']))) - if interactive: plt.show() + dfmap['st'].plot(ax=ax[coords[col]], facecolor='none', edgecolor='k', lw=0.1, zorder=1e6) + ## Colorbar + plots.addcolorbarhist( + f=f, ax0=ax[coords[col]], data=cfmap[col]*100, nbins=51, + cmap=cmaps[col], + vmin=vm[tech][col][0]*100, vmax=vm[tech][col][1]*100, + cbarleft=0.95, cbarbottom=0.1, ticklabel_fontsize=7, + ) + ## Regional differences + for level in levels: + dfdiffs[level].plot( + ax=ax[coords[level]], column='cf_diff', cmap=cmaps['cf_diff'], + vmin=vm[tech]['cf_diff'][0], vmax=vm[tech]['cf_diff'][1], + lw=0, legend=False, + ) + dfmap[level].plot(ax=ax[coords[level]], facecolor='none', edgecolor='k', lw=0.2) + ## Text differences + for r, row in (dfdiffs[level].assign(val=dfdiffs[level].cf_diff.abs()).sort_values('val')).iterrows(): + decimals = 0 if abs(row.cf_diff) >= 1 else 1 + 
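The cf_rep calculation above collapses the timeslice-resolved capacity factors into a single hours-weighted average per resource: cf_rep = sum_h(cf_h * numhours_h) / sum_h(numhours_h). A toy illustration with invented timeslices and values:

import pandas as pd

hours = pd.Series({'h1': 1000, 'h2': 3000, 'h3': 4760})   # made-up numhours per timeslice
cf_hourly = pd.DataFrame(
    {'h1': [0.35, 0.18], 'h2': [0.30, 0.25], 'h3': [0.22, 0.20]},
    index=['wind-ons_1_p1', 'upv_2_p3'],
)

# Multiply each timeslice column by its hours, sum across timeslices, normalize by total hours
cf_rep = (cf_hourly * cf_hourly.columns.map(hours)).sum(axis=1) / hours.sum()
print(cf_rep.round(3))

The regional difference maps that follow apply the same weighting logic once more, except the weights are site capacities and the groups are hierarchy levels (r, st, transreg, and so on).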
ax[coords[level]].annotate( + f"{row.cf_diff*100:+.{decimals}f}", + [row.labelx, row.labely], + ha='center', va='center', c='k', fontsize={'r':5}.get(level,7), + path_effects=[pe.withStroke(linewidth=1.5, foreground='w', alpha=0.5)], + ) + ## Colorbar + plots.addcolorbarhist( + f=f, ax0=ax[coords[level]], data=dfdiffs[level].cf_diff*100, nbins=51, + cmap=cmaps['cf_diff'], + vmin=vm[tech]['cf_diff'][0]*100, vmax=vm[tech]['cf_diff'][1]*100, + cbarleft=0.95, cbarbottom=0.1, ticklabel_fontsize=7, + ) + ## Formatting + ax[0,0].annotate(title+f', tech={tech}', (0.05,1.05), xycoords='axes fraction', fontsize=10) + for level in coords: + # for row in range(nrows): + # for col in range(ncols): + ax[coords[level]].set_title({'cf_diff':'site'}.get(level,level), y=0.9, weight='bold') + ax[coords[level]].axis('off') + savename = f"cfmap-{tech.replace('-','')}-{sw.GSw_HourlyNumClusters}totaldays.png" + print(savename) + plt.savefig(os.path.join(figpath,savename)) + if interactive: + plt.show() plt.close() ### Plot the distribution of capacity factors plt.close() f,ax = plt.subplots() - for col in ['cf_full','cf_hourly']: + for col in ['cf_actual','cf_rep']: ax.plot( np.linspace(0,100,len(cfmap)), - cfmap.sort_values('cf_full', ascending=False)[col].values, + cfmap.sort_values('cf_actual', ascending=False)[col].values, label=col.split('_')[1], color=colors[col], ls=lss[col], zorder=zorders[col], ) @@ -469,11 +437,14 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): ax.legend(fontsize='large', frameon=False) ax.set_ylabel('{} capacity factor [.]'.format(tech)) ax.set_xlabel('Percent of sites [%]') - ax.set_title(title.replace(' ','\n').replace(',',''), x=0, ha='left', fontsize=10) + ax.set_title( + '\n'.join(title.split('\n')[1:]).replace(' ','\n').replace(',',''), + x=0, ha='left', fontsize=10) plots.despine(ax) plt.savefig(os.path.join(figpath,'cfmapdist-{}-{}totaldays.png'.format( tech.replace('-',''), sw['GSw_HourlyNumClusters']))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() ###### Do it again for load @@ -511,7 +482,7 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): plt.close() f,ax = plt.subplots(1,3,figsize=(13,4),gridspec_kw={'wspace':-0.05}) for col in range(3): - dfstates.plot(ax=ax[col], facecolor='none', edgecolor='k', lw=0.25, zorder=10000) + dfmap['st'].plot(ax=ax[col], facecolor='none', edgecolor='k', lw=0.25, zorder=10000) for x,col in enumerate(['GW_full','GW_hourly','GW_frac']): dfmap.plot( ax=ax[x], column=col, cmap=cmaps[col], legend=True, @@ -524,7 +495,8 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): ax[0].set_title(title, y=0.95, x=0.05, ha='left', fontsize=10) plt.savefig(os.path.join(figpath,'loadmap-{}totaldays.png'.format( sw['GSw_HourlyNumClusters']))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() ### Plot the distribution of load by region @@ -550,14 +522,12 @@ def plot_maps(sw, inputs_case, reeds_path, figpath): plots.despine(ax) plt.savefig(os.path.join(figpath,'loadmapdist-{}totaldays.png'.format( sw['GSw_HourlyNumClusters']))) - if interactive: plt.show() + if interactive: + plt.show() plt.close() -def plot_8760(profiles, period_szn, rep_periods, sw, reeds_path, figpath): - import matplotlib.pyplot as plt - import matplotlib as mpl - import site +def plot_8760(profiles, period_szn, sw, reeds_path, figpath): site.addsitedir(os.path.join(reeds_path,'postprocessing')) import plots plots.plotparams() @@ -614,7 +584,8 @@ def get_profiles(regions, year): '{}: {}'.format(prop,' | 
'.join(regions)),x=0,ha='left',fontsize=12) ax[0].legend(loc='lower left', bbox_to_anchor=(0,1.5), ncol=2, frameon=False) plt.savefig(os.path.join(figpath,f'8760-allregions-{year}.png')) - if interactive: plt.show() + if interactive: + plt.show() plt.close() ### Load, wind, solar together; original @@ -623,7 +594,8 @@ def get_profiles(regions, year): dforig[['wind-ons','upv']], colors=['#0064ff','#ff0000'], alpha=0.5) plots.plotyearbymonth(dforig['load'], f=f, ax=ax, style='line', colors='k') plt.savefig(os.path.join(figpath,f'8760-allregions-original-{year}.png')) - if interactive: plt.show() + if interactive: + plt.show() plt.close() ### Load, wind, solar together; representative @@ -632,7 +604,8 @@ def get_profiles(regions, year): dfrep[['wind-ons','upv']], colors=['#0064ff','#ff0000'], alpha=0.5) plots.plotyearbymonth(dfrep['load'], f=f, ax=ax, style='line', colors='k') plt.savefig(os.path.join(figpath,f'8760-allregions-representative-{year}.png')) - if interactive: plt.show() + if interactive: + plt.show() plt.close() ###### Individual regions, original vs representative @@ -654,18 +627,16 @@ def get_profiles(regions, year): ax[i*12+i].set_title('{}: {}'.format(prop,region),x=0,ha='left',fontsize=12) ax[0].legend(loc='lower left', bbox_to_anchor=(0,1.5), ncol=2, frameon=False) plt.savefig(os.path.join(figpath,f'8760-{region}-{year}.png')) - if interactive: plt.show() + if interactive: + plt.show() def plots_original( profiles, rep_periods, period_szn, - sw, reeds_path, figpath, make_plots, + sw, reeds_path, figpath, ): """ """ - import matplotlib.pyplot as plt - import matplotlib as mpl - import site site.addsitedir(os.path.join(reeds_path,'postprocessing')) import plots plots.plotparams() @@ -677,22 +648,6 @@ def plots_original( centroid_profiles = centroids * profiles.stack('h_of_period').max() colors = plots.rainbowmapper(list(set(idx_reedsyr)), plt.cm.turbo) - hoursperperiod = {'day':24, 'wek':120, 'year':24}[sw['GSw_HourlyType']] - periodsperyear = {'day':365, 'wek':73, 'year':365}[sw['GSw_HourlyType']] - - ### plot a dendrogram - if make_plots >= 3: - plt.close() - plt.figure(figsize=(12,9)) - plt.title("Dendrogram of Time Clusters") - import scipy - dend = scipy.cluster.hierarchy.dendrogram( - scipy.cluster.hierarchy.linkage(profiles, method='ward'), - color_threshold=7, - ) - plt.gcf().savefig(os.path.join(figpath,'dendrogram.png')) - if interactive: plt.show() - plt.close() ### PLOT ALL DAYS ON SAME X AXIS: try: @@ -746,114 +701,10 @@ def plots_original( ) plots.despine(ax) plt.savefig(os.path.join(figpath,'day_comparison_all.png')) - if interactive: plt.show() + if interactive: + plt.show() plt.close() except Exception as err: print('day_comparison_all.png failed with the following error:\n{}'.format(err)) - ### PLOT LOAD FOR THE ENTIRE ReEDS YEAR COLORED BY CLUSTER AND MEDOID: - if make_plots >= 3: - try: - for year in sw['GSw_HourlyWeatherYears']: - plt.close() - f,ax = plt.subplots(figsize=(14,3.5)) - plotted = [False for i in range(int(sw['GSw_HourlyNumClusters']))] - nationwide_reedsyr_load = profiles_long[ - (profiles_long['year']==year) & (profiles_long['property']=='load') - ].groupby(['year','yperiod','hour'],as_index=False).sum() - nationwide_reedsyr_load['hour_numeric'] = pd.to_numeric( - nationwide_reedsyr_load['hour'].str.lstrip('h')) - nationwide_reedsyr_load.sort_values(['year','hour_numeric'],inplace=True) - for this_yperiod in nationwide_reedsyr_load.yperiod.unique(): - ax.fill_between( - nationwide_reedsyr_load.loc[ - 
nationwide_reedsyr_load['yperiod']==this_yperiod,'hour_numeric'].to_numpy(), - nationwide_reedsyr_load.loc[ - nationwide_reedsyr_load['yperiod'] == this_yperiod,'value'].to_numpy()/1e3, - ls='-', color=colors[idx_reedsyr[this_yperiod-1]], lw=0, alpha=0.5, - label=( - '{} ({} periods)'.format( - period_szn.loc[period_szn['period']==this_yperiod,'season'].iloc[0], - sum(idx_reedsyr == idx_reedsyr[this_yperiod-1])) - if not plotted[idx_reedsyr[this_yperiod-1]] - else '_nolabel' - ) - ) - plotted[idx_reedsyr[this_yperiod-1]] = True - ### Plot the medoid profiles - for i, (yperiod, row) in enumerate(medoid_profiles.iterrows()): - ax.plot( - list(range((yperiod-1)*hoursperperiod+1,(yperiod)*hoursperperiod+1)), - row['load'].groupby('h_of_period').sum().values/1e3, - color=colors[i], alpha=1, linewidth=1.5, - label='{} Medoid'.format(period_szn.set_index('period').season[int(yperiod)]) - ) - ax.set_xlim(0,8760) - ax.set_ylim(0) - ax.legend( - loc='upper left', bbox_to_anchor=(1,1), ncol=len(colors)//9+1) - ax.set_ylabel('Conterminous US Load (GW)') - ax.set_title('Cluster and Medoid Definitions') - plots.despine(ax) - plt.savefig(os.path.join(figpath,f'year_clusters-load-{year}.png')) - if interactive: plt.show() - plt.close() - except Exception as err: - print('year_clusters.png failed with the following error:\n{}'.format(err)) - - - ### Plot daily profile for the US colored by representative period - try: - ### Create dictionary for assigning month,day to axes row,column - nrows, ncols = (12, 31) if sw['GSw_HourlyType'] in ['day','year'] else (13,6) - coords = dict(zip( - range(1,periodsperyear+1), - [(row,col) for row in range(nrows) for col in range(ncols)] - )) - for year in sw['GSw_HourlyWeatherYears']: - for prop in ['load','upv','wind-ons']: - dfplot = profiles_long[ - (profiles_long['year']==year) & (profiles_long['property']==prop) - ].groupby(['year','yperiod','hour'],as_index=False).sum() - dfplot['hour_numeric'] = pd.to_numeric( - dfplot['hour'].str.lstrip('h')) - dfplot.sort_values(['year','hour_numeric'],inplace=True) - - plt.close() - f,ax = plt.subplots( - nrows, ncols, sharex=True, sharey=True, - gridspec_kw={'wspace':0, 'hspace':0,}, - figsize=(12,6), - ) - for this_yperiod in range(1,periodsperyear+1): - ax[coords[this_yperiod]].fill_between( - range(hoursperperiod), - dfplot.loc[ - dfplot['yperiod'] == this_yperiod,'value'].to_numpy()/1e3, - ls='-', color=colors[idx_reedsyr[this_yperiod]], alpha=0.35, - ) - ### Label the szn - ax[coords[this_yperiod]].annotate( - int(period_szn[this_yperiod][6:]), - (0.5,0), xycoords='axes fraction', va='bottom', ha='center', - fontsize=8, color=colors[idx_reedsyr[this_yperiod]], - ) - ### Plot the medoid profiles - for i, (yperiod, row) in enumerate(medoid_profiles.iterrows()): - ax[coords[yperiod]].plot( - range(hoursperperiod), row[prop].groupby('h_of_period').sum().values/1e3, - color=colors[yperiod], alpha=1, linewidth=2, - ) - for row in range(nrows): - for col in range(ncols): - ax[row,col].axis('off') - ax[0,0].set_title('Cluster and Medoid Definitions', x=0, ha='left') - ax[0,0].set_ylim(0) - ax[0,0].set_xlim(-1,hoursperperiod+1) - plots.despine(ax) - plt.savefig(os.path.join(figpath,f'year_clusters_daily-{prop}.png')) - if interactive: plt.show() - plt.close() - except Exception as err: - print('year_clusters_daily.png failed with the following error:\n{}'.format(err)) diff --git a/input_processing/hourly_writetimeseries.py b/input_processing/hourly_writetimeseries.py index 81029a6..2ccf826 100644 --- 
a/input_processing/hourly_writetimeseries.py +++ b/input_processing/hourly_writetimeseries.py @@ -147,7 +147,7 @@ def get_ccseason_peaks_hourly(load, sw, reeds_path, inputs_case, hierarchy, h2cc rmap = hierarchy[sw['GSw_PRM_hierarchy_level']] elif agglevel in ['ba','state','aggreg']: hierarchy_orig = (pd.read_csv(os.path.join(reeds_path,'inputs','hierarchy.csv')) - .rename(columns={'*county':'county','st':'state'})) + .rename(columns={'st':'state'})) rmap = (hierarchy_orig[hierarchy_orig['ba'].isin(val_r_all)] [['ba',sw['GSw_PRM_hierarchy_level']]] .drop_duplicates().set_index('ba')).squeeze() @@ -377,11 +377,11 @@ def main(sw, reeds_path, inputs_case, periodtype='rep', make_plots=1, figpathtai """ # #%% Settings for testing # reeds_path = os.path.realpath(os.path.join(os.path.dirname(__file__),'..')) - # inputs_case = os.path.join(reeds_path, 'runs', 'v20240218_stressstorM0_Z45_SP_5yr_H0_Southwest', 'inputs_case') + # inputs_case = os.path.join(reeds_path, 'runs', 'v20240318_stressweightM0_Pacific_stress', 'inputs_case') # sw = pd.read_csv( # os.path.join(inputs_case, 'switches.csv'), header=None, index_col=0).squeeze(1) # periodtype = 'rep' - # periodtype = 'stress2035i1' + # periodtype = 'stress2010i0' # make_plots = int(sw.hourly_cluster_plots) # make_plots = 0 # figpathtail = '' @@ -534,19 +534,14 @@ def main(sw, reeds_path, inputs_case, periodtype='rep', make_plots=1, figpathtai hours = ( hmap_myr.groupby('h').season.count().rename('numhours') / (len(sw['GSw_HourlyWeatherYears']) if periodtype == 'rep' else 1)) - # hours = hmap_myr.groupby('h').season.count().rename('numhours') - # ## Representative periods are normalized by the number of modeled years (so that we can - # ## still act like we're modeling over a single year in ReEDS), - # ## but force-included periods are not; they're always treated as full single periods - # hours.loc[ - # ## exclude the hours that belong to force-included periods - # hours.index.map(lambda x: not any([x.startswith(p) for p in forceperiods_prefix])) - # ] /= len(sw['GSw_HourlyWeatherYears']) + ## Stress period hours are scaled to sum to 6 hours, making 8766 hours (365.25 days) per year + if periodtype != 'rep': + hours = hours / hours.sum() * 6 ### Make sure it lines up if periodtype == 'rep': assert int(np.around(hours.sum(), 0)) % 8760 == 0 else: - assert int(np.around(hours.sum(), 0)) % len(hmap_myr) == 0 + assert np.around(hours.sum(), 0) == 6 # create the timeslice-to-season and timeslice-to-ccseason mappings h_szn = hmap_myr[['h','season']].drop_duplicates().reset_index(drop=True) @@ -980,8 +975,8 @@ def main(sw, reeds_path, inputs_case, periodtype='rep', make_plots=1, figpathtai hmap_1yr=hmap_myr, set_szn=set_szn, inputs_case=inputs_case, drcat='dr') shift_out, dr_shifts = get_dr_shifts( - sw=sw, reeds_path=reeds_path, inputs_case=inputs_case, - native_data=False, hmap_7yr=hmap_7yr, hours=hours, chunkmap=chunkmap) + sw=sw, inputs_case=inputs_case, native_data=False, + hmap_7yr=hmap_7yr, hours=hours, chunkmap=chunkmap) else: dr_inc = pd.DataFrame(columns=['*i','r','h']) dr_dec = pd.DataFrame(columns=['*i','r','h']) diff --git a/input_processing/plantcostprep.py b/input_processing/plantcostprep.py index 84a643c..cd3b9f8 100644 --- a/input_processing/plantcostprep.py +++ b/input_processing/plantcostprep.py @@ -360,7 +360,7 @@ def get_pvb_cost( #%% Calculate PVB cost fraction for each PVB design pvb = {} -for i in range(1,4): +for i in sw['GSw_PVB_Types'].split('_'): pvb['pvb{}'.format(i)] = get_pvb_cost( 
UPV_defaultILR_USDperWac=UPV_defaultILR_USDperWac, battery_USDperWac=battery_USDperWac, diff --git a/input_processing/support_functions.py b/input_processing/support_functions.py index 9837376..0ff1732 100644 --- a/input_processing/support_functions.py +++ b/input_processing/support_functions.py @@ -14,10 +14,10 @@ #%% class scen_settings(): - def __init__(self, dollar_year, tech_groups, input_dir, sw): + def __init__(self, dollar_year, tech_groups, inputs_case, sw): self.dollar_year = dollar_year self.tech_groups = tech_groups - self.input_dir = input_dir + self.inputs_case = inputs_case self.sw = sw @@ -223,7 +223,7 @@ def import_data(file_root, file_suffix, indices, scen_settings, inflation_df=[], ''' df = pd.read_csv( - os.path.join(scen_settings.input_dir, 'financials', '%s_%s.csv' % (file_root, file_suffix))) + os.path.join(scen_settings.inputs_case, f'{file_root}.csv')) # Expand tech groups, if there is an 'i' column and the argument is True if 'i' in df.columns and expand_tech_groups==True: @@ -246,11 +246,11 @@ def import_data(file_root, file_suffix, indices, scen_settings, inflation_df=[], # Check if a currency_file_root file exists - it should exist if there are # any columns with currency data. If currency data exists, adjust the dollar # year of the input data to the scen_settings's dollar year - if (os.path.isfile(os.path.join(scen_settings.input_dir, file_root, 'currency_%s.csv' % file_root)) + if (os.path.isfile(os.path.join(scen_settings.inputs_case, file_root, f'currency_{file_root}.csv')) and (adjust_units==True) ): currency_meta = pd.read_csv( - os.path.join(scen_settings.input_dir, file_root, 'currency_%s.csv' % file_root), + os.path.join(scen_settings.inputs_case, f'currency_{file_root}.csv'), index_col='file') inflation_df = inflation_df.set_index('t') @@ -308,7 +308,7 @@ def append_pvb_parameters(dfin, tech_to_copy='battery_4', column_scaler=None, pv ------- dfout: pd.DataFrame consisting of PV+B parameters appended to input dataframe. 
""" - ### Get the PVB classes from upv + ### Get the pvb classes from upv pvb_classes = [i.split('_')[1] for i in dfin.i.unique() if i.startswith('upv')] ### Get values for tech_to_copy copy_params = dfin.set_index('i').loc[[tech_to_copy]].reset_index(drop=True).copy() @@ -372,17 +372,32 @@ def import_and_mod_incentives(incentive_file_suffix, construction_times_suffix, file_root='incentives', file_suffix=incentive_file_suffix, indices=['i','country','t'], inflation_df=inflation_df, scen_settings=scen_settings) - ### Add the hybrid PV+battery incentives (in this case inherited from upv, not battery) + ### Add the hybrid PV+battery incentives + # Always inherit from upv; if upv takes the PTC, pvb will take the PTC on PV generation only, + # and if upv takes the ITC, pvb will take the ITC on all components incentive_df = append_pvb_parameters( dfin=incentive_df, tech_to_copy='upv_1', - column_scaler={'itc_frac': float(scen_settings.sw['GSw_PVB_ITC_Qual_Award'])} - ) + ) + # Inherit from battery if GSw_PVB_BatteryITC = 1 so that the battery component of pvb + # can take the ITC even though the pv component takes the PTC + # Set copy_battery truth value here for use below dealing with duplicate incentives + copy_battery = ( + (float(scen_settings.sw['GSw_PVB_BatteryITC']) > 0) & + (f'battery_{scen_settings.sw["GSw_PVB_Dur"]}' in incentive_df['i'].unique()) + ) + if copy_battery: + incentive_df = append_pvb_parameters( + dfin=incentive_df, tech_to_copy=f'battery_{scen_settings.sw["GSw_PVB_Dur"]}', + column_scaler={'itc_frac': float(scen_settings.sw['GSw_PVB_BatteryITC'])} + ) # Calculate total PTC and ITC value, taking into account bonus # ptc_perc_bonus is a multiplicative increase of the base ptc value. E.g. a value of 0.1 on a $10 ptc value equates to $11 # itc_percpt_bonus is a additive increase of the base itc value. E.g. a value of 0.1 on a 0.3 itc value equates to a 0.4 itc value. incentive_df['ptc_value'] = incentive_df['ptc_value'] * (1.0 + incentive_df['ptc_perc_bonus']) - incentive_df['itc_frac'] = incentive_df['itc_frac'] + incentive_df['itc_percpt_bonus'] + incentive_df.loc[incentive_df['itc_frac']>0,'itc_frac'] = ( + incentive_df.loc[incentive_df['itc_frac']>0,'itc_frac'] + incentive_df.loc[incentive_df['itc_frac']>0,'itc_percpt_bonus'] + ) # Merge with construction start years incentive_df = incentive_df.merge(construction_times, on=['i', 't'], how='left') @@ -438,7 +453,17 @@ def import_and_mod_incentives(incentive_file_suffix, construction_times_suffix, # and selecting the highest. This is not meant to select between competing incentives, as we do not have the operational data # at this point to estimate their value. It is just a simple approach implemented here for lack of time to develop a better one. 
incentive_df = incentive_df.sort_values('value', ascending=False) - incentive_df = incentive_df.drop_duplicates(['i', 'country', 't'], keep='first') + # Keep duplicate incentives for pvb if the battery takes the ITC + if copy_battery: + incentive_df_pvb = incentive_df[incentive_df['i'].str.contains('pvb')].copy() + incentive_df_pvb = (incentive_df_pvb.groupby(by=['i', 'country', 't']) + .first().reset_index(drop=False)) + incentive_df = ( + incentive_df[~(incentive_df['i'].str.contains('pvb'))] + .drop_duplicates(['i', 'country', 't'], keep='first')) + incentive_df = pd.concat([incentive_df, incentive_df_pvb], ignore_index=True) + else: + incentive_df = incentive_df.drop_duplicates(['i', 'country', 't'], keep='first') incentive_df = incentive_df.fillna(0.0) diff --git a/input_processing/transmission.py b/input_processing/transmission.py index b037db0..3fa2a81 100644 --- a/input_processing/transmission.py +++ b/input_processing/transmission.py @@ -184,7 +184,47 @@ def get_trancap_init(valid_regions, agglevel, networksource='NARIS2024', level=' ### --- PROCEDURE --- ### =========================================================================== -### Get single-link distances and losses +#%% Limits on PRMTRADE across nercr boundaries +solveyears = pd.read_csv( + os.path.join(reeds_path,'inputs','modeledyears.csv'), + usecols=[sw['yearset_suffix']], +).squeeze(1).dropna().astype(int).tolist() +solveyears = [y for y in solveyears if y <= int(sw['endyear'])] + +val_nercr = pd.read_csv( + os.path.join(inputs_case,'val_nercr.csv'), header=None, +).squeeze(1).values +## Take the max over all years for each region and drop negative values +planned_firm_transfers = pd.read_csv( + os.path.join(reeds_path,'inputs','reserves','net_firm_transfers_nerc.csv'), +).pivot(index='t',columns='nercr',values='MW')[val_nercr].max().clip(lower=0).rename('MW') +## Keep planned firm transfers for years before GSw_PRMTRADE_limit to use in the +## eq_firm_transfer_limit / eq_firm_transfer_limit_cc constraints +if any([y < int(sw.GSw_PRMTRADE_limit) for y in solveyears]): + firm_transfer_limit = pd.concat({ + y: planned_firm_transfers + for y in solveyears + if y <= int(sw.GSw_PRMTRADE_limit) + }, names=['t']).reorder_levels(['nercr','t']).rename_axis(['*nercr','t']) +## Otherwise, if GSw_PRMTRADE_limit is set to a value before all solve years (such as 0), +## write an empty dataframe +else: + firm_transfer_limit = pd.DataFrame(columns=['*nercr','t','MW']).set_index(['*nercr','t']) +firm_transfer_limit.to_csv(os.path.join(inputs_case, 'firm_transfer_limit.csv')) + +### Planning reserve margin +( + pd.read_csv( + os.path.join(reeds_path,'inputs','reserves','prm_annual.csv'), + index_col=['*nercr','t']) + [sw['GSw_PRM_scenario']] + ## Fill years before data begin with the first year's data + .unstack('*nercr').reindex(solveyears).fillna(method='bfill').stack('*nercr') + .reorder_levels(['*nercr','t']).loc[val_nercr].round(4) +).to_csv(os.path.join(inputs_case,'prm_annual.csv')) + + +#%% Get single-link distances and losses # Get single-link distances [miles] infiles = {'AC':'500kVac', 'LCC':'500kVdc', 'B2B':'500kVac'} tline_data = pd.concat({ diff --git a/input_processing/writedrshift.py b/input_processing/writedrshift.py index 57ed184..3d4e4fe 100644 --- a/input_processing/writedrshift.py +++ b/input_processing/writedrshift.py @@ -14,7 +14,6 @@ ### =========================================================================== import os import argparse -import shutil import pandas as pd # Time the operation of this script 
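The prm_annual block in the transmission.py hunk above reindexes the planning-reserve-margin table to the run's solve years and backfills years that precede the data. A minimal sketch of that reindex-and-backfill pattern with invented regions, years, and margins:

import pandas as pd

prm = pd.DataFrame(
    {'MISO': [0.16, 0.17], 'PJM': [0.15, 0.155]},
    index=pd.Index([2030, 2040], name='t'),
)
solveyears = [2020, 2025, 2030, 2035, 2040]

# Reindex to all solve years; .bfill() is equivalent to the fillna(method='bfill') call above,
# so years before the first data year take the first available year's value
prm_full = prm.reindex(solveyears).bfill()
print(prm_full)   # 2020 and 2025 inherit the 2030 values; 2035 inherits the 2040 values

The planned firm-transfer limit just above it follows a similar shape: pivot to years-by-region, take the max over years, clip negatives to zero, then repeat that row for every solve year up to GSw_PRMTRADE_limit.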
from ticker import toc, makelog @@ -30,7 +29,7 @@ ### --- FUNCTIONS --- ### =========================================================================== -def get_dr_shifts(sw, reeds_path, inputs_case, native_data=True, +def get_dr_shifts(sw, inputs_case, native_data=True, hmap_7yr=None, chunkmap=None, hours=None): """ part of shift demand response handling compatible both with h17 and hourly ReEDS @@ -40,7 +39,7 @@ def get_dr_shifts(sw, reeds_path, inputs_case, native_data=True, dr_hrs = pd.read_csv( os.path.join( - reeds_path, 'inputs', 'demand_response', f"dr_shifts_{sw['drscen']}.csv") + inputs_case, 'dr_shifts.csv') ) # write out dr_hrs for Augur @@ -57,15 +56,10 @@ def get_dr_shifts(sw, reeds_path, inputs_case, native_data=True, #### native_data reads in inputs directly if native_data: - hr_ts = pd.read_csv( - os.path.join(reeds_path, 'inputs', 'variability', 'h_dt_szn.csv')) - hr_ts = hr_ts.loc[(hr_ts['hour'] <= 8760), ['h', 'hour', 'season']] - num_hrs = pd.read_csv( - os.path.join(reeds_path, 'inputs', 'numhours.csv'), - header=0, names=['h', 'numhours'], index_col='h').squeeze(1) hr_ts = pd.read_csv( os.path.join(inputs_case, 'h_dt_szn.csv')) hr_ts = hr_ts.loc[(hr_ts['hour'] <= 8760), ['h', 'hour', 'season']] + num_hrs = pd.read_csv( os.path.join(inputs_case, 'numhours.csv'), header=0, names=['h', 'numhours'], index_col='h').squeeze(1) @@ -129,6 +123,10 @@ def get_dr_shifts(sw, reeds_path, inputs_case, native_data=True, args = parser.parse_args() inputs_case = args.inputs_case reeds_path = args.reeds_path + + # Settings for testing + # reeds_path = os.getcwd() + # inputs_case = os.path.join(reeds_path,'runs','dr1_Pacific','inputs_case') #%% Set up logger log = makelog(scriptname=__file__, logpath=os.path.join(inputs_case,'..','gamslog.txt')) @@ -142,50 +140,52 @@ def get_dr_shifts(sw, reeds_path, inputs_case, native_data=True, ### Read in DR shed for specified scenario dr_shed = pd.read_csv( - os.path.join(args.reeds_path, 'inputs', 'demand_response', f'dr_shed_{drscen}.csv')) + os.path.join(inputs_case, 'dr_shed.csv')) ### Profiles - dr_profile_increase = pd.read_csv( - os.path.join(reeds_path,'inputs','demand_response',f'dr_increase_profile_{sw.drscen}.csv')) - dr_profile_decrease = pd.read_csv( - os.path.join(reeds_path,'inputs','demand_response',f'dr_decrease_profile_{sw.drscen}.csv')) evmc_shape_profile_decrease = pd.read_hdf( - os.path.join(reeds_path,'inputs','demand_response',f'evmc_shape_decrease_profile_{sw.evmcscen}.h5')) + os.path.join(inputs_case,'evmc_shape_decrease_profile.h5')) evmc_shape_profile_increase = pd.read_hdf( - os.path.join(reeds_path,'inputs','demand_response',f'evmc_shape_increase_profile_{sw.evmcscen}.h5')) + os.path.join(inputs_case,'evmc_shape_increase_profile.h5')) evmc_storage_profile_decrease = pd.read_hdf( - os.path.join(reeds_path,'inputs','demand_response',f'evmc_storage_decrease_profile_{sw.evmcscen}.h5')) + os.path.join(inputs_case,'evmc_storage_decrease_profile.h5')) evmc_storage_profile_increase = pd.read_hdf( - os.path.join(reeds_path,'inputs','demand_response',f'evmc_storage_increase_profile_{sw.evmcscen}.h5')) + os.path.join(inputs_case,'evmc_storage_increase_profile.h5')) evmc_storage_energy = pd.read_hdf( - os.path.join(reeds_path,'inputs','demand_response',f'evmc_storage_energy_{sw.evmcscen}.h5')) + os.path.join(inputs_case,'evmc_storage_energy.h5')) ### Filter by regions val_r_all = pd.read_csv( os.path.join(inputs_case, 'val_r_all.csv'), header=None).squeeze(1).tolist() - dr_profile_increase = ( - 
dr_profile_increase.loc[:,dr_profile_increase.columns.isin(['i','hour','year'] + val_r_all)]) - dr_profile_decrease = ( - dr_profile_decrease.loc[:,dr_profile_decrease.columns.isin(['i','hour','year'] + val_r_all)]) - evmc_shape_profile_decrease = ( - evmc_shape_profile_decrease.loc[:,evmc_shape_profile_decrease.columns.isin(['i','hour','year'] + val_r_all)]) - evmc_shape_profile_increase = ( - evmc_shape_profile_increase.loc[:,evmc_shape_profile_increase.columns.isin(['i','hour','year'] + val_r_all)]) - evmc_storage_profile_decrease = ( - evmc_storage_profile_decrease.loc[:,evmc_storage_profile_decrease.columns.isin(['i','hour','year'] + val_r_all)]) - evmc_storage_profile_increase = ( - evmc_storage_profile_increase.loc[:,evmc_storage_profile_increase.columns.isin(['i','hour','year'] + val_r_all)]) - evmc_storage_energy = ( - evmc_storage_energy.loc[:,evmc_storage_energy.columns.isin(['i','hour','year'] + val_r_all)]) + val_r = pd.read_csv( + os.path.join(inputs_case, 'val_r.csv'), header=None).squeeze(1).tolist() + if int(sw['GSw_EVMC']): + evmc_shape_profile_decrease = ( + evmc_shape_profile_decrease.loc[ + :,evmc_shape_profile_decrease.columns.isin(['i','hour','year'] + val_r_all)]) + evmc_shape_profile_increase = ( + evmc_shape_profile_increase.loc[ + :,evmc_shape_profile_increase.columns.isin(['i','hour','year'] + val_r_all)]) + evmc_storage_profile_decrease = ( + evmc_storage_profile_decrease.loc[ + :,evmc_storage_profile_decrease.columns.isin(['i','hour','year'] + val_r_all)]) + evmc_storage_profile_increase = ( + evmc_storage_profile_increase.loc[ + :,evmc_storage_profile_increase.columns.isin(['i','hour','year'] + val_r_all)]) + evmc_storage_energy = ( + evmc_storage_energy.loc[ + :,evmc_storage_energy.columns.isin(['i','hour','year'] + val_r_all)]) + else: + evmc_shape_profile_decrease = pd.DataFrame(columns=['i','hour','year']+val_r) + evmc_shape_profile_increase = pd.DataFrame(columns=['i','hour','year']+val_r) + evmc_storage_profile_decrease = pd.DataFrame(columns=['i','hour','year']+val_r) + evmc_storage_profile_increase = pd.DataFrame(columns=['i','hour','year']+val_r) + evmc_storage_energy = pd.DataFrame(columns=['i','hour','year']+val_r) dr_shed[['dr_type', 'yr_hrs']].to_csv( os.path.join(inputs_case, 'dr_shed.csv'), index=False, header=False) - dr_profile_increase.to_csv( - os.path.join(inputs_case,'dr_inc.csv'),index=False) - dr_profile_decrease.to_csv( - os.path.join(inputs_case,'dr_dec.csv'),index=False) evmc_shape_profile_decrease.to_csv( os.path.join(inputs_case,'evmc_shape_profile_decrease.csv'),index=False) evmc_shape_profile_increase.to_csv( @@ -196,11 +196,6 @@ def get_dr_shifts(sw, reeds_path, inputs_case, native_data=True, os.path.join(inputs_case,'evmc_storage_profile_increase.csv'),index=False) evmc_storage_energy.to_csv( os.path.join(inputs_case,'evmc_storage_energy.csv'),index=False) - - # Copy DR types - shutil.copy( - os.path.join(args.reeds_path,'inputs','demand_response',f'dr_types_{drscen}.csv'), - os.path.join(inputs_case, 'dr_types.csv')) print('Finished writedrshift.py') diff --git a/inputs/carbonconstraints/emit_scale.csv b/inputs/carbonconstraints/emit_scale.csv index 5a76971..38923ef 100644 --- a/inputs/carbonconstraints/emit_scale.csv +++ b/inputs/carbonconstraints/emit_scale.csv @@ -1,6 +1,6 @@ *e,emit_scale -CO2,1000 -SO2,1 -NOX,1 -HG,1 -CH4,1 +CO2,1 +SO2,0.001 +NOX,0.001 +HG,0.001 +CH4,0.001 diff --git a/inputs/hierarchy.csv b/inputs/hierarchy.csv index 2f01e53..33ad6cf 100644 --- a/inputs/hierarchy.csv +++ b/inputs/hierarchy.csv @@ -1,82 
+1,82 @@ -*county,ba,nercr,transreg,transgrp,cendiv,st,interconnect,st_interconnect,country,usda_region,aggreg,county_name -p53007,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,chelan -p53029,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,island -p53033,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,king -p53035,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,kitsap -p53037,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,kittitas -p53053,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,pierce -p53055,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,san juan -p53057,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,skagit -p53061,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,snohomish -p53067,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,thurston -p53073,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,whatcom -p53003,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,asotin -p53005,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,benton -p53009,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,clallam -p53011,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,clark -p53013,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,columbia -p53015,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,cowlitz -p53023,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,garfield -p53027,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,grays harbor -p53031,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,jefferson -p53039,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,klickitat -p53041,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,lewis -p53045,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,mason -p53049,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,pacific -p53059,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,skamania -p53069,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,wahkiakum -p53071,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,walla walla -p53077,p2,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,yakima -p53001,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,adams -p53019,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,ferry -p53021,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,franklin -p53043,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,lincoln -p53047,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,okanogan -p53051,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,pend oreille -p53063,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,spokane -p53065,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,stevens -p53075,p3,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,whitman 
-p53017,p4,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,douglas -p53025,p4,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,grant -p41003,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,benton -p41005,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,clackamas -p41007,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,clatsop -p41009,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,columbia -p41013,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,crook -p41017,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,deschutes -p41021,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,gilliam -p41023,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,grant -p41027,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,hood river -p41031,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,jefferson -p41039,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,lane -p41041,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,lincoln -p41043,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,linn -p41047,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,marion -p41049,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,morrow -p41051,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,multnomah -p41053,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,polk -p41055,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,sherman -p41057,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,tillamook -p41059,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,umatilla -p41061,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,union -p41063,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,wallowa -p41065,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,wasco -p41067,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,washington -p41069,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,wheeler -p41071,p5,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,yamhill -p41011,p6,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,coos -p41015,p6,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,curry -p41019,p6,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,douglas -p41029,p6,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,jackson -p41033,p6,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,josephine -p41035,p6,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,klamath -p41001,p7,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,baker -p41025,p7,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,harney -p41037,p7,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,lake -p41045,p7,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,malheur 
-p06035,p8,WECC_NWPP,NorthernGrid,NorthernGrid_South,PA,CA,western,CA,USA,pacific,CA_NorthernGrid,lassen -p06049,p8,WECC_NWPP,NorthernGrid,NorthernGrid_South,PA,CA,western,CA,USA,pacific,CA_NorthernGrid,modoc -p06093,p8,WECC_NWPP,NorthernGrid,NorthernGrid_South,PA,CA,western,CA,USA,pacific,CA_NorthernGrid,siskiyou +county,ba,nercr,transreg,transgrp,cendiv,st,interconnect,st_interconnect,country,usda_region,aggreg,county_name +p53007,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,chelan +p53029,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,island +p53033,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,king +p53035,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,kitsap +p53037,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,kittitas +p53053,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,pierce +p53055,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,san juan +p53057,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,skagit +p53061,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,snohomish +p53067,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,thurston +p53073,p1,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,whatcom +p53003,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,asotin +p53005,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,benton +p53009,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,clallam +p53011,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,clark +p53013,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,columbia +p53015,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,cowlitz +p53023,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,garfield +p53027,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,grays harbor +p53031,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,jefferson +p53039,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,klickitat +p53041,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,lewis +p53045,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,mason +p53049,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,pacific +p53059,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,skamania +p53069,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,wahkiakum +p53071,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,walla walla +p53077,p2,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,yakima +p53001,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,adams +p53019,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,ferry +p53021,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,franklin +p53043,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,lincoln +p53047,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,okanogan +p53051,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,pend oreille 
+p53063,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,spokane +p53065,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,stevens +p53075,p3,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,whitman +p53017,p4,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,douglas +p53025,p4,WECC_NW,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,grant +p41003,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,benton +p41005,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,clackamas +p41007,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,clatsop +p41009,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,columbia +p41013,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,crook +p41017,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,deschutes +p41021,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,gilliam +p41023,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,grant +p41027,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,hood river +p41031,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,jefferson +p41039,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,lane +p41041,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,lincoln +p41043,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,linn +p41047,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,marion +p41049,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,morrow +p41051,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,multnomah +p41053,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,polk +p41055,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,sherman +p41057,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,tillamook +p41059,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,umatilla +p41061,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,union +p41063,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,wallowa +p41065,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,wasco +p41067,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,washington +p41069,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,wheeler +p41071,p5,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,yamhill +p41011,p6,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,coos +p41015,p6,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,curry +p41019,p6,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,douglas +p41029,p6,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,jackson +p41033,p6,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,josephine +p41035,p6,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,klamath +p41001,p7,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,baker +p41025,p7,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,harney 
+p41037,p7,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,lake +p41045,p7,WECC_NW,NorthernGrid,NorthernGrid_West,PA,OR,western,OR,USA,pacific,OR,malheur +p06035,p8,WECC_NW,NorthernGrid,NorthernGrid_South,PA,CA,western,CA,USA,pacific,CA_NorthernGrid,lassen +p06049,p8,WECC_NW,NorthernGrid,NorthernGrid_South,PA,CA,western,CA,USA,pacific,CA_NorthernGrid,modoc +p06093,p8,WECC_NW,NorthernGrid,NorthernGrid_South,PA,CA,western,CA,USA,pacific,CA_NorthernGrid,siskiyou p06001,p9,WECC_CA,CAISO,CAISO,PA,CA,western,CA,USA,pacific,CA_N_CAISO,alameda p06003,p9,WECC_CA,CAISO,CAISO,PA,CA,western,CA,USA,pacific,CA_N_CAISO,alpine p06005,p9,WECC_CA,CAISO,CAISO,PA,CA,western,CA,USA,pacific,CA_N_CAISO,amador @@ -132,275 +132,275 @@ p06071,p10,WECC_CA,CAISO,CAISO,PA,CA,western,CA,USA,pacific,CA_S_CAISO,san berna p06107,p10,WECC_CA,CAISO,CAISO,PA,CA,western,CA,USA,pacific,CA_S_CAISO,tulare p06111,p10,WECC_CA,CAISO,CAISO,PA,CA,western,CA,USA,pacific,CA_S_CAISO,ventura p06073,p11,WECC_CA,CAISO,CAISO,PA,CA,western,CA,USA,pacific,CA_S_CAISO,san diego -p32001,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,churchill -p32005,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,douglas -p32007,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,elko -p32009,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,esmeralda -p32011,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,eureka -p32013,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,humboldt -p32015,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,lander -p32017,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,lincoln -p32019,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,lyon -p32021,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,mineral -p32023,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,nye -p32027,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,pershing -p32029,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,storey -p32031,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,washoe -p32033,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,white pine -p32510,p12,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,carson city -p32003,p13,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,clark -p16009,p14,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,benewah -p16017,p14,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bonner -p16021,p14,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,boundary -p16055,p14,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,kootenai -p16057,p14,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,latah -p16061,p14,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,lewis -p16069,p14,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,nez perce -p16001,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,ada -p16003,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,adams 
-p16013,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,blaine -p16015,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,boise -p16023,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,butte -p16025,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,camas -p16027,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,canyon -p16035,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,clearwater -p16037,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,custer -p16039,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,elmore -p16045,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,gem -p16047,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,gooding -p16049,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,idaho -p16053,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,jerome -p16059,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,lemhi -p16063,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,lincoln -p16067,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,minidoka -p16073,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,owyhee -p16075,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,payette -p16079,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,shoshone -p16085,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,valley -p16087,p15,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,washington -p16005,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bannock -p16007,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bear lake -p16011,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bingham -p16019,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bonneville -p16029,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,caribou -p16031,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,cassia -p16033,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,clark -p16041,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,franklin -p16043,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,fremont -p16051,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,jefferson -p16065,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,madison -p16071,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,oneida -p16077,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,power -p16081,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,teton -p16083,p16,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,twin falls -p30001,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,beaverhead -p30007,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,broadwater 
-p30023,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,deer lodge -p30029,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,flathead -p30039,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,granite -p30043,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,jefferson -p30047,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,lake -p30053,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,lincoln -p30057,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,madison -p30061,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,mineral -p30063,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,missoula -p30077,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,powell -p30081,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,ravalli -p30089,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,sanders -p30093,p17,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,silver bow -p30003,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,big horn -p30005,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,blaine -p30009,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,carbon -p30013,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,cascade -p30015,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,chouteau -p30031,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,gallatin -p30035,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,glacier -p30041,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,hill -p30049,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,lewis and clark -p30051,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,liberty -p30059,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,meagher -p30067,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,park -p30071,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,phillips -p30073,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,pondera -p30095,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,stillwater -p30097,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,sweet grass -p30099,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,teton 
-p30101,p18,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,toole -p30033,p19,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,garfield -p30105,p19,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,valley -p30027,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,fergus -p30037,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,golden valley -p30045,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,judith basin -p30065,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,musselshell -p30069,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,petroleum -p30075,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,powder river -p30087,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,rosebud -p30103,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,treasure -p30107,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,wheatland -p30111,p20,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,yellowstone -p56003,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,big horn -p56013,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,fremont -p56017,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,hot springs -p56023,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,lincoln -p56029,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,park -p56035,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,sublette -p56037,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,sweetwater -p56039,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,teton -p56041,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,uinta -p56043,p21,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,washakie -p56019,p22,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,johnson -p56033,p22,WECC_NWPP,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,sheridan -p56005,p23,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,campbell -p56011,p23,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,crook -p56045,p23,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,weston -p56001,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,albany -p56007,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,carbon -p56009,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,converse 
-p56015,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,goshen -p56021,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,laramie -p56025,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,natrona -p56027,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,niobrara -p56031,p24,WECC_NWPP,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,platte -p49001,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,beaver -p49003,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,box elder -p49005,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,cache -p49007,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,carbon -p49011,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,davis -p49015,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,emery -p49017,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,garfield -p49019,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,grand -p49021,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,iron -p49023,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,juab -p49025,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,kane -p49027,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,millard -p49031,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,piute -p49035,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,salt lake -p49037,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,san juan -p49039,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,sanpete -p49041,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,sevier -p49045,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,tooele -p49049,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,utah -p49053,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,washington -p49055,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,wayne -p49057,p25,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,weber -p49009,p26,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,daggett -p49013,p26,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,duchesne -p49029,p26,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,morgan -p49033,p26,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,rich -p49043,p26,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,summit -p49047,p26,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,uintah -p49051,p26,WECC_NWPP,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,wasatch -p04015,p27,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,mohave -p04003,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,cochise 
-p04005,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,coconino -p04007,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,gila -p04009,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,graham -p04011,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,greenlee -p04012,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,la paz -p04013,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,maricopa -p04017,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,navajo -p04021,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,pinal -p04025,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,yavapai -p04027,p28,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,yuma -p04001,p29,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,apache -p04019,p30,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,pima -p04023,p30,WECC_SRSG,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,santa cruz -p35001,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,bernalillo -p35003,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,catron -p35006,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,cibola -p35007,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,colfax -p35011,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,de baca -p35013,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,dona ana -p35017,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,grant -p35019,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,guadalupe -p35021,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,harding -p35023,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,hidalgo -p35027,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,lincoln -p35028,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,los alamos -p35029,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,luna -p35031,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,mckinley -p35033,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,mora -p35035,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,otero -p35039,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,rio arriba -p35043,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,sandoval -p35045,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,san juan -p35047,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,san miguel 
-p35049,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,santa fe -p35051,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,sierra -p35053,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,socorro -p35055,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,taos -p35057,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,torrance -p35059,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,union -p35061,p31,WECC_SRSG,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,valencia -p46019,p32,WECC_NWPP,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,butte -p46033,p32,WECC_NWPP,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,custer -p46047,p32,WECC_NWPP,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,fall river -p46081,p32,WECC_NWPP,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,lawrence -p46103,p32,WECC_NWPP,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,pennington -p08001,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,adams -p08005,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,arapahoe -p08013,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,boulder -p08014,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,broomfield -p08019,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,clear creek -p08031,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,denver -p08035,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,douglas -p08037,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,eagle -p08045,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,garfield -p08047,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,gilpin -p08049,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,grand -p08057,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,jackson -p08059,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,jefferson -p08065,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,lake -p08069,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,larimer -p08075,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,logan -p08077,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,mesa -p08081,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,moffat -p08087,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,morgan -p08093,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,park -p08095,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,phillips -p08097,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,pitkin -p08103,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,rio 
blanco -p08107,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,routt -p08115,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,sedgwick -p08117,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,summit -p08119,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,teller -p08121,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,washington -p08123,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,weld -p08125,p33,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,yuma -p08003,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,alamosa -p08007,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,archuleta -p08009,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,baca -p08011,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,bent -p08015,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,chaffee -p08017,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,cheyenne -p08021,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,conejos -p08023,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,costilla -p08025,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,crowley -p08027,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,custer -p08029,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,delta -p08033,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,dolores -p08039,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,elbert -p08041,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,el paso -p08043,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,fremont -p08051,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,gunnison -p08053,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,hinsdale -p08055,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,huerfano -p08061,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,kiowa -p08063,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,kit carson -p08067,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,la plata -p08071,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,las animas -p08073,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,lincoln -p08079,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,mineral -p08083,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,montezuma -p08085,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,montrose -p08089,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,otero -p08091,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,ouray -p08099,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,prowers -p08101,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,pueblo 
-p08105,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,rio grande -p08109,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,saguache -p08111,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,san juan -p08113,p34,WECC_NWPP,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,san miguel +p32001,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,churchill +p32005,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,douglas +p32007,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,elko +p32009,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,esmeralda +p32011,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,eureka +p32013,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,humboldt +p32015,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,lander +p32017,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,lincoln +p32019,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,lyon +p32021,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,mineral +p32023,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,nye +p32027,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,pershing +p32029,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,storey +p32031,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,washoe +p32033,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,white pine +p32510,p12,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,carson city +p32003,p13,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,NV,western,NV,USA,mountain,NV,clark +p16009,p14,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,benewah +p16017,p14,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bonner +p16021,p14,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,boundary +p16055,p14,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,kootenai +p16057,p14,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,latah +p16061,p14,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,lewis +p16069,p14,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,nez perce +p16001,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,ada +p16003,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,adams +p16013,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,blaine +p16015,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,boise +p16023,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,butte +p16025,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,camas +p16027,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,canyon +p16035,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,clearwater +p16037,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,custer +p16039,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,elmore 
+p16045,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,gem +p16047,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,gooding +p16049,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,idaho +p16053,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,jerome +p16059,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,lemhi +p16063,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,lincoln +p16067,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,minidoka +p16073,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,owyhee +p16075,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,payette +p16079,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,shoshone +p16085,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,valley +p16087,p15,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,washington +p16005,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bannock +p16007,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bear lake +p16011,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bingham +p16019,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,bonneville +p16029,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,caribou +p16031,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,cassia +p16033,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,clark +p16041,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,franklin +p16043,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,fremont +p16051,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,jefferson +p16065,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,madison +p16071,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,oneida +p16077,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,power +p16081,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,teton +p16083,p16,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,ID,western,ID,USA,mountain,ID,twin falls +p30001,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,beaverhead +p30007,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,broadwater +p30023,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,deer lodge +p30029,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,flathead +p30039,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,granite +p30043,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,jefferson +p30047,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,lake +p30053,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,lincoln +p30057,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,madison 
+p30061,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,mineral +p30063,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,missoula +p30077,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,powell +p30081,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,ravalli +p30089,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,sanders +p30093,p17,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,silver bow +p30003,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,big horn +p30005,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,blaine +p30009,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,carbon +p30013,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,cascade +p30015,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,chouteau +p30031,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,gallatin +p30035,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,glacier +p30041,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,hill +p30049,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,lewis and clark +p30051,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,liberty +p30059,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,meagher +p30067,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,park +p30071,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,phillips +p30073,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,pondera +p30095,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,stillwater +p30097,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,sweet grass +p30099,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,teton +p30101,p18,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,toole +p30033,p19,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,garfield +p30105,p19,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,valley +p30027,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,fergus +p30037,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,golden valley +p30045,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,judith basin +p30065,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,musselshell 
+p30069,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,petroleum +p30075,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,powder river +p30087,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,rosebud +p30103,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,treasure +p30107,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,wheatland +p30111,p20,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,MT,western,MT_western,USA,mountain,MT_NorthernGrid,yellowstone +p56003,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,big horn +p56013,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,fremont +p56017,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,hot springs +p56023,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,lincoln +p56029,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,park +p56035,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,sublette +p56037,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,sweetwater +p56039,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,teton +p56041,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,uinta +p56043,p21,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,washakie +p56019,p22,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,johnson +p56033,p22,WECC_NW,NorthernGrid,NorthernGrid_East,MTN,WY,western,WY,USA,mountain,WY_NorthernGrid,sheridan +p56005,p23,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,campbell +p56011,p23,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,crook +p56045,p23,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,weston +p56001,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,albany +p56007,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,carbon +p56009,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,converse +p56015,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,goshen +p56021,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,laramie +p56025,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,natrona +p56027,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,niobrara +p56031,p24,WECC_NW,WestConnect,WestConnect_North,MTN,WY,western,WY,USA,mountain,WY_WestConnect,platte +p49001,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,beaver +p49003,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,box elder +p49005,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,cache +p49007,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,carbon 
+p49011,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,davis +p49015,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,emery +p49017,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,garfield +p49019,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,grand +p49021,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,iron +p49023,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,juab +p49025,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,kane +p49027,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,millard +p49031,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,piute +p49035,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,salt lake +p49037,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,san juan +p49039,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,sanpete +p49041,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,sevier +p49045,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,tooele +p49049,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,utah +p49053,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,washington +p49055,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,wayne +p49057,p25,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,weber +p49009,p26,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,daggett +p49013,p26,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,duchesne +p49029,p26,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,morgan +p49033,p26,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,rich +p49043,p26,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,summit +p49047,p26,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,uintah +p49051,p26,WECC_NW,NorthernGrid,NorthernGrid_South,MTN,UT,western,UT,USA,mountain,UT,wasatch +p04015,p27,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,mohave +p04003,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,cochise +p04005,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,coconino +p04007,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,gila +p04009,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,graham +p04011,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,greenlee +p04012,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,la paz +p04013,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,maricopa +p04017,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,navajo +p04021,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,pinal +p04025,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,yavapai +p04027,p28,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,yuma +p04001,p29,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,apache 
+p04019,p30,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,pima +p04023,p30,WECC_SW,WestConnect,WestConnect_South,MTN,AZ,western,AZ,USA,mountain,AZ,santa cruz +p35001,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,bernalillo +p35003,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,catron +p35006,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,cibola +p35007,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,colfax +p35011,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,de baca +p35013,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,dona ana +p35017,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,grant +p35019,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,guadalupe +p35021,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,harding +p35023,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,hidalgo +p35027,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,lincoln +p35028,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,los alamos +p35029,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,luna +p35031,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,mckinley +p35033,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,mora +p35035,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,otero +p35039,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,rio arriba +p35043,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,sandoval +p35045,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,san juan +p35047,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,san miguel +p35049,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,santa fe +p35051,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,sierra +p35053,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,socorro +p35055,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,taos +p35057,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,torrance +p35059,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,union +p35061,p31,WECC_SW,WestConnect,WestConnect_South,MTN,NM,western,NM_western,USA,mountain,NM_WestConnect,valencia +p46019,p32,WECC_NW,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,butte +p46033,p32,WECC_NW,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,custer 
+p46047,p32,WECC_NW,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,fall river +p46081,p32,WECC_NW,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,lawrence +p46103,p32,WECC_NW,WestConnect,WestConnect_North,MTN,SD,western,SD_western,USA,northern-plains,SD_WestConnect,pennington +p08001,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,adams +p08005,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,arapahoe +p08013,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,boulder +p08014,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,broomfield +p08019,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,clear creek +p08031,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,denver +p08035,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,douglas +p08037,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,eagle +p08045,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,garfield +p08047,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,gilpin +p08049,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,grand +p08057,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,jackson +p08059,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,jefferson +p08065,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,lake +p08069,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,larimer +p08075,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,logan +p08077,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,mesa +p08081,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,moffat +p08087,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,morgan +p08093,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,park +p08095,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,phillips +p08097,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,pitkin +p08103,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,rio blanco +p08107,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,routt +p08115,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,sedgwick +p08117,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,summit +p08119,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,teller +p08121,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,washington +p08123,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,weld +p08125,p33,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,yuma +p08003,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,alamosa +p08007,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,archuleta +p08009,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,baca +p08011,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,bent 
+p08015,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,chaffee
+p08017,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,cheyenne
+p08021,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,conejos
+p08023,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,costilla
+p08025,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,crowley
+p08027,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,custer
+p08029,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,delta
+p08033,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,dolores
+p08039,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,elbert
+p08041,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,el paso
+p08043,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,fremont
+p08051,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,gunnison
+p08053,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,hinsdale
+p08055,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,huerfano
+p08061,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,kiowa
+p08063,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,kit carson
+p08067,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,la plata
+p08071,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,las animas
+p08073,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,lincoln
+p08079,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,mineral
+p08083,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,montezuma
+p08085,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,montrose
+p08089,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,otero
+p08091,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,ouray
+p08099,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,prowers
+p08101,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,pueblo
+p08105,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,rio grande
+p08109,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,saguache
+p08111,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,san juan
+p08113,p34,WECC_NW,WestConnect,WestConnect_North,MTN,CO,western,CO,USA,mountain,CO,san miguel
p30011,p35,SPP,SPP,SPP_North,WNC,MT,eastern,MT_eastern,USA,mountain,MT_SPP,carter
p30017,p35,SPP,SPP,SPP_North,WNC,MT,eastern,MT_eastern,USA,mountain,MT_SPP,custer
p30019,p35,SPP,SPP,SPP_North,WNC,MT,eastern,MT_eastern,USA,mountain,MT_SPP,daniels
@@ -1109,7 +1109,7 @@ p22119,p58,MISO,MISO,MISO_South,WSC,LA,eastern,LA,USA,delta-states,LA,webster
p22121,p58,MISO,MISO,MISO_South,WSC,LA,eastern,LA,USA,delta-states,LA,west baton rouge
p22125,p58,MISO,MISO,MISO_South,WSC,LA,eastern,LA,USA,delta-states,LA,west feliciana
p22127,p58,MISO,MISO,MISO_South,WSC,LA,eastern,LA,USA,delta-states,LA,winn
-p48141,p59,WECC_SRSG,WestConnect,WestConnect_South,MTN,TX,western,TX_western,USA,southern-plains,TX_WestConnect,el paso
+p48141,p59,WECC_SW,WestConnect,WestConnect_South,MTN,TX,western,TX_western,USA,southern-plains,TX_WestConnect,el paso
p48023,p60,ERCOT,ERCOT,ERCOT,WSC,TX,ercot,TX_ercot,USA,southern-plains,TX_W_ERCOT,baylor
p48033,p60,ERCOT,ERCOT,ERCOT,WSC,TX,ercot,TX_ercot,USA,southern-plains,TX_W_ERCOT,borden
p48045,p60,ERCOT,ERCOT,ERCOT,WSC,TX,ercot,TX_ercot,USA,southern-plains,TX_W_ERCOT,briscoe
@@ -1744,533 +1744,533 @@ p28151,p87,MISO,MISO,MISO_South,ESC,MS,eastern,MS,USA,delta-states,MS_MISO,washi
p28153,p87,MISO,MISO,MISO_South,ESC,MS,eastern,MS,USA,delta-states,MS_MISO,wayne
p28157,p87,MISO,MISO,MISO_South,ESC,MS,eastern,MS,USA,delta-states,MS_MISO,wilkinson
p28163,p87,MISO,MISO,MISO_South,ESC,MS,eastern,MS,USA,delta-states,MS_MISO,yazoo
-p28003,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,alcorn
-p28009,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,benton
-p28013,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,calhoun
-p28017,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,chickasaw
-p28019,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,choctaw
-p28025,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,clay
-p28057,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,itawamba
-p28069,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,kemper
-p28071,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,lafayette
-p28081,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,lee
-p28087,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,lowndes
-p28093,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,marshall
-p28095,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,monroe
-p28099,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,neshoba
-p28103,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,noxubee
-p28105,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,oktibbeha
-p28115,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,pontotoc
-p28117,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,prentiss
-p28139,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,tippah
-p28141,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,tishomingo
-p28145,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,union
-p28155,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,webster
-p28159,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,winston
-p28161,p88,SERC,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,yalobusha
-p01007,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,bibb
-p01023,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,choctaw
-p01033,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,colbert
-p01043,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,cullman
-p01049,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,dekalb
-p01057,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,fayette
-p01059,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,franklin
-p01063,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,greene
-p01065,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,hale
-p01071,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,jackson
-p01073,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,jefferson
-p01075,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lamar
-p01077,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lauderdale -p01079,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lawrence -p01083,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,limestone -p01089,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,madison -p01091,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,marengo -p01093,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,marion -p01095,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,marshall -p01103,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,morgan -p01105,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,perry -p01107,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,pickens -p01117,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,shelby -p01119,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,sumter -p01125,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,tuscaloosa -p01127,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,walker -p01131,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,wilcox -p01133,p89,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,winston -p01001,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,autauga -p01003,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,baldwin -p01005,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,barbour -p01009,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,blount -p01011,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,bullock -p01013,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,butler -p01015,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,calhoun -p01017,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,chambers -p01019,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,cherokee -p01021,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,chilton -p01025,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,clarke -p01027,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,clay -p01029,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,cleburne -p01031,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,coffee -p01035,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,conecuh -p01037,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,coosa -p01039,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,covington -p01041,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,crenshaw -p01045,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,dale -p01047,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,dallas -p01051,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,elmore -p01053,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,escambia -p01055,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,etowah -p01061,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,geneva -p01067,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,henry -p01069,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,houston -p01081,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lee -p01085,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lowndes -p01087,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,macon -p01097,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,mobile -p01099,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,monroe -p01101,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,montgomery -p01109,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,pike 
-p01111,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,randolph -p01113,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,russell -p01115,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,st. clair -p01121,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,talladega -p01123,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,tallapoosa -p01129,p90,SERC,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,washington -p12005,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,bay -p12013,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,calhoun -p12033,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,escambia -p12045,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,gulf -p12059,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,holmes -p12063,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,jackson -p12091,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,okaloosa -p12113,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,santa rosa -p12131,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,walton -p12133,p91,SERC,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,washington -p47001,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,anderson -p47003,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,bedford -p47005,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,benton -p47007,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,bledsoe -p47009,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,blount -p47011,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,bradley -p47013,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,campbell -p47015,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cannon -p47017,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,carroll -p47019,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,carter -p47021,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cheatham -p47023,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,chester -p47025,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,claiborne -p47027,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,clay -p47029,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cocke -p47031,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,coffee -p47033,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,crockett -p47035,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cumberland -p47037,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,davidson -p47039,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,decatur -p47041,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,dekalb -p47043,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,dickson -p47045,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,dyer -p47047,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,fayette -p47049,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,fentress -p47051,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,franklin -p47053,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,gibson -p47055,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,giles -p47057,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,grainger -p47059,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,greene -p47061,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,grundy -p47063,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hamblen 
-p47065,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hamilton -p47067,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hancock -p47069,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hardeman -p47071,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hardin -p47073,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hawkins -p47075,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,haywood -p47077,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,henderson -p47079,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,henry -p47081,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hickman -p47083,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,houston -p47085,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,humphreys -p47087,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,jackson -p47089,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,jefferson -p47091,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,johnson -p47093,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,knox -p47095,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lake -p47097,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lauderdale -p47099,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lawrence -p47101,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lewis -p47103,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lincoln -p47105,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,loudon -p47107,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,mcminn -p47109,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,mcnairy -p47111,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,macon -p47113,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,madison -p47115,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,marion -p47117,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,marshall -p47119,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,maury -p47121,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,meigs -p47123,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,monroe -p47125,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,montgomery -p47127,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,moore -p47129,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,morgan -p47131,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,obion -p47133,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,overton -p47135,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,perry -p47137,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,pickett -p47139,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,polk -p47141,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,putnam -p47143,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,rhea -p47145,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,roane -p47147,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,robertson -p47149,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,rutherford -p47151,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,scott -p47153,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sequatchie -p47155,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sevier -p47157,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,shelby -p47159,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,smith 
-p47161,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,stewart -p47163,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sullivan -p47165,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sumner -p47167,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,tipton -p47169,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,trousdale -p47171,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,unicoi -p47173,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,union -p47175,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,van buren -p47177,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,warren -p47179,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,washington -p47181,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,wayne -p47183,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,weakley -p47185,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,white -p47187,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,williamson -p47189,p92,SERC,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,wilson -p21003,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,allen -p21007,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,ballard -p21031,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,butler -p21035,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,calloway -p21039,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,carlisle -p21047,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,christian -p21053,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,clinton -p21057,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,cumberland -p21075,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,fulton -p21083,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,graves -p21105,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,hickman -p21139,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,livingston -p21141,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,logan -p21143,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,lyon -p21145,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,mccracken -p21147,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,mccreary -p21157,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,marshall -p21169,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,metcalfe -p21171,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,monroe -p21199,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,pulaski -p21207,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,russell -p21213,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,simpson -p21219,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,todd -p21221,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,trigg -p21227,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,warren -p21231,p93,SERC,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,wayne -p13001,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,appling -p13003,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,atkinson -p13005,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bacon -p13007,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,baker -p13009,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,baldwin 
-p13011,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,banks -p13013,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,barrow -p13015,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bartow -p13017,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,ben hill -p13019,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,berrien -p13021,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bibb -p13023,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bleckley -p13025,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,brantley -p13027,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,brooks -p13029,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bryan -p13031,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bulloch -p13033,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,burke -p13035,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,butts -p13037,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,calhoun -p13039,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,camden -p13043,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,candler -p13045,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,carroll -p13047,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,catoosa -p13049,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,charlton -p13051,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,chatham -p13053,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,chattahoochee -p13055,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,chattooga -p13057,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,cherokee -p13059,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clarke -p13061,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clay -p13063,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clayton -p13065,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clinch -p13067,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,cobb -p13069,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,coffee -p13071,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,colquitt -p13073,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,columbia -p13075,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,cook -p13077,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,coweta -p13079,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,crawford -p13081,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,crisp -p13083,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dade -p13085,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dawson -p13087,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,decatur -p13089,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dekalb -p13091,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dodge -p13093,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dooly -p13095,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dougherty -p13097,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,douglas -p13099,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,early -p13101,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,echols -p13103,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,effingham -p13105,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,elbert -p13107,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,emanuel -p13109,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,evans -p13111,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,fannin 
-p13113,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,fayette -p13115,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,floyd -p13117,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,forsyth -p13119,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,franklin -p13121,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,fulton -p13123,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,gilmer -p13125,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,glascock -p13127,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,glynn -p13129,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,gordon -p13131,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,grady -p13133,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,greene -p13135,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,gwinnett -p13137,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,habersham -p13139,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,hall -p13141,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,hancock -p13143,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,haralson -p13145,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,harris -p13147,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,hart -p13149,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,heard -p13151,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,henry -p13153,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,houston -p13155,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,irwin -p13157,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jackson -p13159,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jasper -p13161,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jeff davis -p13163,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jefferson -p13165,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jenkins -p13167,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,johnson -p13169,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jones -p13171,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lamar -p13173,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lanier -p13175,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,laurens -p13177,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lee -p13179,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,liberty -p13181,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lincoln -p13183,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,long -p13185,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lowndes -p13187,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lumpkin -p13189,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,mcduffie -p13191,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,mcintosh -p13193,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,macon -p13195,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,madison -p13197,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,marion -p13199,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,meriwether -p13201,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,miller -p13205,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,mitchell -p13207,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,monroe -p13209,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,montgomery -p13211,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,morgan -p13213,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,murray 
-p13215,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,muscogee -p13217,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,newton -p13219,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,oconee -p13221,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,oglethorpe -p13223,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,paulding -p13225,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,peach -p13227,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pickens -p13229,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pierce -p13231,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pike -p13233,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,polk -p13235,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pulaski -p13237,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,putnam -p13239,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,quitman -p13241,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,rabun -p13243,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,randolph -p13245,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,richmond -p13247,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,rockdale -p13249,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,schley -p13251,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,screven -p13253,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,seminole -p13255,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,spalding -p13257,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,stephens -p13259,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,stewart -p13261,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,sumter -p13263,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,talbot -p13265,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,taliaferro -p13267,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,tattnall -p13269,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,taylor -p13271,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,telfair -p13273,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,terrell -p13275,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,thomas -p13277,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,tift -p13279,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,toombs -p13281,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,towns -p13283,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,treutlen -p13285,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,troup -p13287,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,turner -p13289,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,twiggs -p13291,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,union -p13293,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,upson -p13295,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,walker -p13297,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,walton -p13299,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,ware -p13301,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,warren -p13303,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,washington -p13305,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wayne -p13307,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,webster -p13309,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wheeler -p13311,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,white -p13313,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,whitfield 
-p13315,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wilcox -p13317,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wilkes -p13319,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wilkinson -p13321,p94,SERC,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,worth -p45001,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,abbeville -p45007,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,anderson -p45021,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,cherokee -p45023,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,chester -p45045,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,greenville -p45047,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,greenwood -p45057,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,lancaster -p45059,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,laurens -p45073,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,oconee -p45077,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,pickens -p45083,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,spartanburg -p45087,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,union -p45091,p95,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,york -p45003,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,aiken -p45005,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,allendale -p45009,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,bamberg -p45011,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,barnwell -p45013,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,beaufort -p45015,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,berkeley -p45017,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,calhoun -p45019,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,charleston -p45025,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,chesterfield -p45027,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,clarendon -p45029,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,colleton -p45031,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,darlington -p45033,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,dillon -p45035,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,dorchester -p45037,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,edgefield -p45039,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,fairfield -p45041,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,florence -p45043,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,georgetown -p45049,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,hampton -p45051,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,horry -p45053,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,jasper -p45055,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,kershaw -p45061,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,lee -p45063,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,lexington -p45065,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,mccormick -p45067,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,marion -p45069,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,marlboro -p45071,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,newberry -p45075,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,orangeburg -p45079,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,richland -p45081,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,saluda -p45085,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,sumter 
-p45089,p96,SERC,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,williamsburg -p37003,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,alexander -p37005,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,alleghany -p37007,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,anson -p37009,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,ashe -p37011,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,avery -p37021,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,buncombe -p37023,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,burke -p37025,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cabarrus -p37027,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,caldwell -p37035,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,catawba -p37039,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cherokee -p37043,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,clay -p37045,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cleveland -p37057,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,davidson -p37059,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,davie -p37067,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,forsyth -p37071,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,gaston -p37075,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,graham -p37081,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,guilford -p37087,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,haywood -p37089,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,henderson -p37097,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,iredell -p37099,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,jackson -p37109,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,lincoln -p37111,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,mcdowell -p37113,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,macon -p37115,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,madison -p37119,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,mecklenburg -p37121,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,mitchell -p37149,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,polk -p37151,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,randolph -p37157,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,rockingham -p37159,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,rowan -p37161,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,rutherford -p37167,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,stanly -p37169,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,stokes -p37171,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,surry -p37173,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,swain -p37175,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,transylvania -p37179,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,union -p37189,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,watauga -p37193,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wilkes -p37197,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,yadkin -p37199,p97,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,yancey -p37001,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,alamance -p37013,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,beaufort -p37015,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,bertie -p37017,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,bladen 
-p37019,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,brunswick -p37029,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,camden -p37031,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,carteret -p37033,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,caswell -p37037,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,chatham -p37041,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,chowan -p37047,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,columbus -p37049,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,craven -p37051,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cumberland -p37053,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,currituck -p37055,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,dare -p37061,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,duplin -p37063,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,durham -p37065,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,edgecombe -p37069,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,franklin -p37073,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,gates -p37077,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,granville -p37079,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,greene -p37083,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,halifax -p37085,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,harnett -p37091,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,hertford -p37093,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,hoke -p37095,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,hyde -p37101,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,johnston -p37103,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,jones -p37105,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,lee -p37107,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,lenoir -p37117,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,martin -p37123,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,montgomery -p37125,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,moore -p37127,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,nash -p37129,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,new hanover -p37131,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,northampton -p37133,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,onslow -p37135,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,orange -p37137,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pamlico -p37139,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pasquotank -p37141,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pender -p37143,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,perquimans -p37145,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,person -p37147,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pitt -p37153,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,richmond -p37155,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,robeson -p37163,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,sampson -p37165,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,scotland -p37177,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,tyrrell -p37181,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,vance -p37183,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wake -p37185,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,warren 
-p37187,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,washington -p37191,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wayne -p37195,p98,SERC,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wilson +p28003,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,alcorn +p28009,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,benton +p28013,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,calhoun +p28017,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,chickasaw +p28019,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,choctaw +p28025,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,clay +p28057,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,itawamba +p28069,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,kemper +p28071,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,lafayette +p28081,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,lee +p28087,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,lowndes +p28093,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,marshall +p28095,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,monroe +p28099,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,neshoba +p28103,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,noxubee +p28105,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,oktibbeha +p28115,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,pontotoc +p28117,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,prentiss +p28139,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,tippah +p28141,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,tishomingo +p28145,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,union +p28155,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,webster +p28159,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,winston +p28161,p88,SERC_C,SERTP,SERTP,ESC,MS,eastern,MS,USA,delta-states,MS_SERTP,yalobusha +p01007,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,bibb +p01023,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,choctaw +p01033,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,colbert +p01043,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,cullman +p01049,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,dekalb +p01057,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,fayette +p01059,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,franklin +p01063,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,greene +p01065,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,hale +p01071,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,jackson +p01073,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,jefferson +p01075,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lamar +p01077,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lauderdale +p01079,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lawrence +p01083,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,limestone +p01089,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,madison +p01091,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,marengo 
+p01093,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,marion +p01095,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,marshall +p01103,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,morgan +p01105,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,perry +p01107,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,pickens +p01117,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,shelby +p01119,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,sumter +p01125,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,tuscaloosa +p01127,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,walker +p01131,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,wilcox +p01133,p89,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,winston +p01001,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,autauga +p01003,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,baldwin +p01005,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,barbour +p01009,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,blount +p01011,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,bullock +p01013,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,butler +p01015,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,calhoun +p01017,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,chambers +p01019,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,cherokee +p01021,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,chilton +p01025,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,clarke +p01027,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,clay +p01029,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,cleburne +p01031,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,coffee +p01035,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,conecuh +p01037,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,coosa +p01039,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,covington +p01041,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,crenshaw +p01045,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,dale +p01047,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,dallas +p01051,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,elmore +p01053,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,escambia +p01055,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,etowah +p01061,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,geneva +p01067,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,henry +p01069,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,houston +p01081,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lee +p01085,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,lowndes +p01087,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,macon +p01097,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,mobile +p01099,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,monroe +p01101,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,montgomery +p01109,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,pike +p01111,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,randolph +p01113,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,russell +p01115,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,st. 
clair +p01121,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,talladega +p01123,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,tallapoosa +p01129,p90,SERC_SE,SERTP,SERTP,ESC,AL,eastern,AL,USA,southeast,AL,washington +p12005,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,bay +p12013,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,calhoun +p12033,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,escambia +p12045,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,gulf +p12059,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,holmes +p12063,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,jackson +p12091,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,okaloosa +p12113,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,santa rosa +p12131,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,walton +p12133,p91,SERC_SE,SERTP,SERTP,SA,FL,eastern,FL,USA,southeast,FL_SERTP,washington +p47001,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,anderson +p47003,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,bedford +p47005,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,benton +p47007,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,bledsoe +p47009,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,blount +p47011,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,bradley +p47013,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,campbell +p47015,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cannon +p47017,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,carroll +p47019,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,carter +p47021,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cheatham +p47023,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,chester +p47025,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,claiborne +p47027,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,clay +p47029,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cocke +p47031,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,coffee +p47033,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,crockett +p47035,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,cumberland +p47037,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,davidson +p47039,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,decatur +p47041,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,dekalb +p47043,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,dickson +p47045,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,dyer +p47047,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,fayette +p47049,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,fentress +p47051,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,franklin +p47053,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,gibson +p47055,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,giles +p47057,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,grainger +p47059,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,greene +p47061,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,grundy +p47063,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hamblen +p47065,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hamilton 
+p47067,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hancock +p47069,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hardeman +p47071,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hardin +p47073,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hawkins +p47075,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,haywood +p47077,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,henderson +p47079,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,henry +p47081,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,hickman +p47083,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,houston +p47085,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,humphreys +p47087,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,jackson +p47089,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,jefferson +p47091,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,johnson +p47093,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,knox +p47095,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lake +p47097,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lauderdale +p47099,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lawrence +p47101,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lewis +p47103,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,lincoln +p47105,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,loudon +p47107,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,mcminn +p47109,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,mcnairy +p47111,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,macon +p47113,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,madison +p47115,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,marion +p47117,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,marshall +p47119,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,maury +p47121,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,meigs +p47123,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,monroe +p47125,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,montgomery +p47127,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,moore +p47129,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,morgan +p47131,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,obion +p47133,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,overton +p47135,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,perry +p47137,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,pickett +p47139,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,polk +p47141,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,putnam +p47143,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,rhea +p47145,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,roane +p47147,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,robertson +p47149,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,rutherford +p47151,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,scott +p47153,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sequatchie +p47155,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sevier +p47157,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,shelby +p47159,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,smith 
+p47161,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,stewart +p47163,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sullivan +p47165,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,sumner +p47167,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,tipton +p47169,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,trousdale +p47171,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,unicoi +p47173,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,union +p47175,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,van buren +p47177,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,warren +p47179,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,washington +p47181,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,wayne +p47183,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,weakley +p47185,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,white +p47187,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,williamson +p47189,p92,SERC_C,SERTP,SERTP,ESC,TN,eastern,TN,USA,appalachia,TN,wilson +p21003,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,allen +p21007,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,ballard +p21031,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,butler +p21035,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,calloway +p21039,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,carlisle +p21047,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,christian +p21053,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,clinton +p21057,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,cumberland +p21075,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,fulton +p21083,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,graves +p21105,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,hickman +p21139,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,livingston +p21141,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,logan +p21143,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,lyon +p21145,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,mccracken +p21147,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,mccreary +p21157,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,marshall +p21169,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,metcalfe +p21171,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,monroe +p21199,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,pulaski +p21207,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,russell +p21213,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,simpson +p21219,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,todd +p21221,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,trigg +p21227,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,warren +p21231,p93,SERC_C,SERTP,SERTP,ESC,KY,eastern,KY,USA,appalachia,KY_SERTP,wayne +p13001,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,appling +p13003,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,atkinson +p13005,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bacon +p13007,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,baker 
+p13009,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,baldwin +p13011,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,banks +p13013,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,barrow +p13015,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bartow +p13017,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,ben hill +p13019,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,berrien +p13021,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bibb +p13023,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bleckley +p13025,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,brantley +p13027,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,brooks +p13029,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bryan +p13031,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,bulloch +p13033,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,burke +p13035,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,butts +p13037,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,calhoun +p13039,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,camden +p13043,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,candler +p13045,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,carroll +p13047,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,catoosa +p13049,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,charlton +p13051,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,chatham +p13053,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,chattahoochee +p13055,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,chattooga +p13057,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,cherokee +p13059,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clarke +p13061,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clay +p13063,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clayton +p13065,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,clinch +p13067,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,cobb +p13069,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,coffee +p13071,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,colquitt +p13073,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,columbia +p13075,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,cook +p13077,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,coweta +p13079,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,crawford +p13081,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,crisp +p13083,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dade +p13085,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dawson +p13087,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,decatur +p13089,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dekalb +p13091,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dodge +p13093,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dooly +p13095,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,dougherty +p13097,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,douglas +p13099,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,early +p13101,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,echols +p13103,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,effingham +p13105,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,elbert 
+p13107,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,emanuel +p13109,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,evans +p13111,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,fannin +p13113,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,fayette +p13115,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,floyd +p13117,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,forsyth +p13119,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,franklin +p13121,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,fulton +p13123,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,gilmer +p13125,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,glascock +p13127,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,glynn +p13129,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,gordon +p13131,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,grady +p13133,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,greene +p13135,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,gwinnett +p13137,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,habersham +p13139,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,hall +p13141,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,hancock +p13143,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,haralson +p13145,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,harris +p13147,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,hart +p13149,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,heard +p13151,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,henry +p13153,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,houston +p13155,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,irwin +p13157,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jackson +p13159,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jasper +p13161,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jeff davis +p13163,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jefferson +p13165,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jenkins +p13167,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,johnson +p13169,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,jones +p13171,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lamar +p13173,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lanier +p13175,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,laurens +p13177,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lee +p13179,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,liberty +p13181,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lincoln +p13183,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,long +p13185,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lowndes +p13187,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,lumpkin +p13189,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,mcduffie +p13191,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,mcintosh +p13193,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,macon +p13195,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,madison +p13197,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,marion +p13199,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,meriwether +p13201,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,miller 
+p13205,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,mitchell +p13207,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,monroe +p13209,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,montgomery +p13211,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,morgan +p13213,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,murray +p13215,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,muscogee +p13217,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,newton +p13219,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,oconee +p13221,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,oglethorpe +p13223,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,paulding +p13225,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,peach +p13227,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pickens +p13229,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pierce +p13231,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pike +p13233,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,polk +p13235,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,pulaski +p13237,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,putnam +p13239,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,quitman +p13241,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,rabun +p13243,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,randolph +p13245,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,richmond +p13247,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,rockdale +p13249,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,schley +p13251,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,screven +p13253,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,seminole +p13255,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,spalding +p13257,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,stephens +p13259,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,stewart +p13261,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,sumter +p13263,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,talbot +p13265,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,taliaferro +p13267,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,tattnall +p13269,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,taylor +p13271,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,telfair +p13273,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,terrell +p13275,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,thomas +p13277,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,tift +p13279,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,toombs +p13281,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,towns +p13283,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,treutlen +p13285,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,troup +p13287,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,turner +p13289,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,twiggs +p13291,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,union +p13293,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,upson +p13295,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,walker +p13297,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,walton +p13299,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,ware 
+p13301,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,warren +p13303,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,washington +p13305,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wayne +p13307,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,webster +p13309,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wheeler +p13311,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,white +p13313,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,whitfield +p13315,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wilcox +p13317,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wilkes +p13319,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,wilkinson +p13321,p94,SERC_SE,SERTP,SERTP,SA,GA,eastern,GA,USA,southeast,GA,worth +p45001,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,abbeville +p45007,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,anderson +p45021,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,cherokee +p45023,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,chester +p45045,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,greenville +p45047,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,greenwood +p45057,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,lancaster +p45059,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,laurens +p45073,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,oconee +p45077,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,pickens +p45083,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,spartanburg +p45087,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,union +p45091,p95,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,york +p45003,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,aiken +p45005,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,allendale +p45009,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,bamberg +p45011,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,barnwell +p45013,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,beaufort +p45015,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,berkeley +p45017,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,calhoun +p45019,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,charleston +p45025,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,chesterfield +p45027,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,clarendon +p45029,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,colleton +p45031,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,darlington +p45033,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,dillon +p45035,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,dorchester +p45037,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,edgefield +p45039,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,fairfield +p45041,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,florence +p45043,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,georgetown +p45049,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,hampton +p45051,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,horry +p45053,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,jasper +p45055,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,kershaw +p45061,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,lee +p45063,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,lexington 
+p45065,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,mccormick +p45067,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,marion +p45069,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,marlboro +p45071,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,newberry +p45075,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,orangeburg +p45079,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,richland +p45081,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,saluda +p45085,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,sumter +p45089,p96,SERC_E,SERTP,SERTP,SA,SC,eastern,SC,USA,southeast,SC,williamsburg +p37003,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,alexander +p37005,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,alleghany +p37007,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,anson +p37009,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,ashe +p37011,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,avery +p37021,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,buncombe +p37023,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,burke +p37025,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cabarrus +p37027,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,caldwell +p37035,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,catawba +p37039,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cherokee +p37043,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,clay +p37045,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cleveland +p37057,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,davidson +p37059,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,davie +p37067,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,forsyth +p37071,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,gaston +p37075,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,graham +p37081,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,guilford +p37087,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,haywood +p37089,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,henderson +p37097,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,iredell +p37099,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,jackson +p37109,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,lincoln +p37111,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,mcdowell +p37113,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,macon +p37115,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,madison +p37119,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,mecklenburg +p37121,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,mitchell +p37149,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,polk +p37151,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,randolph +p37157,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,rockingham +p37159,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,rowan +p37161,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,rutherford +p37167,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,stanly +p37169,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,stokes +p37171,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,surry +p37173,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,swain 
+p37175,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,transylvania +p37179,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,union +p37189,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,watauga +p37193,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wilkes +p37197,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,yadkin +p37199,p97,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,yancey +p37001,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,alamance +p37013,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,beaufort +p37015,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,bertie +p37017,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,bladen +p37019,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,brunswick +p37029,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,camden +p37031,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,carteret +p37033,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,caswell +p37037,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,chatham +p37041,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,chowan +p37047,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,columbus +p37049,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,craven +p37051,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,cumberland +p37053,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,currituck +p37055,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,dare +p37061,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,duplin +p37063,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,durham +p37065,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,edgecombe +p37069,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,franklin +p37073,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,gates +p37077,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,granville +p37079,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,greene +p37083,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,halifax +p37085,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,harnett +p37091,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,hertford +p37093,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,hoke +p37095,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,hyde +p37101,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,johnston +p37103,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,jones +p37105,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,lee +p37107,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,lenoir +p37117,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,martin +p37123,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,montgomery +p37125,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,moore +p37127,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,nash +p37129,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,new hanover +p37131,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,northampton +p37133,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,onslow +p37135,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,orange +p37137,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pamlico +p37139,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pasquotank +p37141,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pender 
+p37143,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,perquimans +p37145,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,person +p37147,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,pitt +p37153,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,richmond +p37155,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,robeson +p37163,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,sampson +p37165,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,scotland +p37177,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,tyrrell +p37181,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,vance +p37183,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wake +p37185,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,warren +p37187,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,washington +p37191,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wayne +p37195,p98,SERC_E,SERTP,SERTP,SA,NC,eastern,NC,USA,appalachia,NC,wilson p51003,p99,PJM,PJM,PJM_East,SA,VA,eastern,VA,USA,appalachia,VA,albemarle p51005,p99,PJM,PJM,PJM_East,SA,VA,eastern,VA,USA,appalachia,VA,alleghany p51007,p99,PJM,PJM,PJM_East,SA,VA,eastern,VA,USA,appalachia,VA,amelia @@ -2368,63 +2368,63 @@ p51139,p100,PJM,PJM,PJM_East,SA,VA,eastern,VA,USA,appalachia,VA,page p51157,p100,PJM,PJM,PJM_East,SA,VA,eastern,VA,USA,appalachia,VA,rappahannock p51187,p100,PJM,PJM,PJM_East,SA,VA,eastern,VA,USA,appalachia,VA,warren p51840,p100,PJM,PJM,PJM_East,SA,VA,eastern,VA,USA,appalachia,VA,winchester -p12001,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,alachua -p12003,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,baker -p12007,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,bradford -p12015,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,charlotte -p12017,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,citrus -p12019,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,clay -p12021,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,collier -p12023,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,columbia -p12027,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,desoto -p12029,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,dixie -p12031,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,duval -p12035,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,flagler -p12037,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,franklin -p12039,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,gadsden -p12041,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,gilchrist -p12043,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,glades -p12047,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hamilton -p12049,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hardee -p12051,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hendry -p12053,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hernando -p12055,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,highlands -p12057,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hillsborough -p12065,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,jefferson -p12067,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,lafayette -p12069,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,lake -p12071,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,lee 
-p12073,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,leon -p12075,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,levy -p12077,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,liberty -p12079,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,madison -p12081,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,manatee -p12083,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,marion -p12087,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,monroe -p12089,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,nassau -p12093,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,okeechobee -p12095,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,orange -p12097,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,osceola -p12101,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,pasco -p12103,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,pinellas -p12105,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,polk -p12107,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,putnam -p12109,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,st. johns -p12115,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,sarasota -p12117,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,seminole -p12119,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,sumter -p12121,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,suwannee -p12123,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,taylor -p12125,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,union -p12127,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,volusia -p12129,p101,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,wakulla -p12009,p102,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,brevard -p12011,p102,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,broward -p12061,p102,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,indian river -p12085,p102,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,martin -p12086,p102,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,miami-dade -p12099,p102,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,palm beach -p12111,p102,SERC,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,st. 
lucie +p12001,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,alachua +p12003,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,baker +p12007,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,bradford +p12015,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,charlotte +p12017,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,citrus +p12019,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,clay +p12021,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,collier +p12023,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,columbia +p12027,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,desoto +p12029,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,dixie +p12031,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,duval +p12035,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,flagler +p12037,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,franklin +p12039,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,gadsden +p12041,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,gilchrist +p12043,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,glades +p12047,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hamilton +p12049,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hardee +p12051,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hendry +p12053,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hernando +p12055,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,highlands +p12057,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,hillsborough +p12065,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,jefferson +p12067,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,lafayette +p12069,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,lake +p12071,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,lee +p12073,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,leon +p12075,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,levy +p12077,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,liberty +p12079,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,madison +p12081,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,manatee +p12083,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,marion +p12087,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,monroe +p12089,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,nassau +p12093,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,okeechobee +p12095,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,orange +p12097,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,osceola +p12101,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,pasco +p12103,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,pinellas +p12105,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,polk +p12107,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,putnam +p12109,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,st. 
johns +p12115,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,sarasota +p12117,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,seminole +p12119,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,sumter +p12121,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,suwannee +p12123,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,taylor +p12125,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,union +p12127,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,volusia +p12129,p101,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,wakulla +p12009,p102,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,brevard +p12011,p102,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,broward +p12061,p102,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,indian river +p12085,p102,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,martin +p12086,p102,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,miami-dade +p12099,p102,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,palm beach +p12111,p102,SERC_F,FRCC,FRCC,SA,FL,eastern,FL,USA,southeast,FL_FRCC,st. lucie p26001,p103,MISO,MISO,MISO_Central,ENC,MI,eastern,MI,USA,lake-states,MI_MISO,alcona p26005,p103,MISO,MISO,MISO_Central,ENC,MI,eastern,MI,USA,lake-states,MI_MISO,allegan p26007,p103,MISO,MISO,MISO_Central,ENC,MI,eastern,MI,USA,lake-states,MI_MISO,alpena diff --git a/inputs/hierarchy_agg2.csv b/inputs/hierarchy_agg2.csv index 89ccc5f..b8b29af 100644 --- a/inputs/hierarchy_agg2.csv +++ b/inputs/hierarchy_agg2.csv @@ -1,4 +1,4 @@ -*county,ba,nercr,transreg,transgrp,cendiv,st,interconnect,st_interconnect,country,usda_region,aggreg,county_name +county,ba,nercr,transreg,transgrp,cendiv,st,interconnect,st_interconnect,country,usda_region,aggreg,county_name p53007,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,chelan p53029,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,island p53033,p1,WECC_NWPP,NorthernGrid,NorthernGrid_West,PA,WA,western,WA,USA,pacific,WA,king diff --git a/inputs/reserves/README.md b/inputs/reserves/README.md new file mode 100644 index 0000000..3d5e7f7 --- /dev/null +++ b/inputs/reserves/README.md @@ -0,0 +1,3 @@ +# Input files +* `net_firm_transfers_nerc.csv`: Taken from the NERC Long-Term Reliability Assessment (LTRA) 2023: https://www.nerc.com/pa/RAPA/ra/Reliability%20Assessments%20DL/NERC_LTRA_2023.pdf (specifically the "Net Firm Capacity Transfers" reported for each reliability region) +* `prm_annual.csv`: Taken from the 2023 NERC LTRA (specifically the "Reference Margin Level (%)" reported for each reliability region) diff --git a/inputs/reserves/net_firm_transfers_nerc.csv b/inputs/reserves/net_firm_transfers_nerc.csv new file mode 100644 index 0000000..b3d123d --- /dev/null +++ b/inputs/reserves/net_firm_transfers_nerc.csv @@ -0,0 +1,79 @@ +nercr,t,source,MW +MISO,2024,NERC_LTRA_2023,2125 +NPCC_NE,2024,NERC_LTRA_2023,1297 +NPCC_NE,2025,NERC_LTRA_2023,1504 +NPCC_NY,2024,NERC_LTRA_2023,1932 +NPCC_NY,2025,NERC_LTRA_2023,1815 +NPCC_NY,2026,NERC_LTRA_2023,3212 +NPCC_NY,2027,NERC_LTRA_2023,3518 +PJM,2024,NERC_LTRA_2023,-607 +PJM,2025,NERC_LTRA_2023,-105 +PJM,2026,NERC_LTRA_2023,0 +SERC_E,2024,NERC_LTRA_2023,624 +SERC_E,2025,NERC_LTRA_2023,624 +SERC_E,2026,NERC_LTRA_2023,624 +SERC_E,2027,NERC_LTRA_2023,624 +SERC_E,2028,NERC_LTRA_2023,624 +SERC_E,2029,NERC_LTRA_2023,624 +SERC_E,2030,NERC_LTRA_2023,624 +SERC_E,2031,NERC_LTRA_2023,624 +SERC_E,2032,NERC_LTRA_2023,624 
+SERC_E,2033,NERC_LTRA_2023,624 +SERC_C,2024,NERC_LTRA_2023,198 +SERC_C,2025,NERC_LTRA_2023,-677 +SERC_C,2026,NERC_LTRA_2023,-677 +SERC_C,2027,NERC_LTRA_2023,-677 +SERC_C,2028,NERC_LTRA_2023,-677 +SERC_C,2029,NERC_LTRA_2023,-677 +SERC_C,2030,NERC_LTRA_2023,-677 +SERC_C,2031,NERC_LTRA_2023,-677 +SERC_C,2032,NERC_LTRA_2023,-677 +SERC_C,2033,NERC_LTRA_2023,-677 +SERC_F,2024,NERC_LTRA_2023,594 +SERC_F,2025,NERC_LTRA_2023,700 +SERC_F,2026,NERC_LTRA_2023,499 +SERC_F,2027,NERC_LTRA_2023,499 +SERC_F,2028,NERC_LTRA_2023,406 +SERC_F,2029,NERC_LTRA_2023,406 +SERC_F,2030,NERC_LTRA_2023,406 +SERC_F,2031,NERC_LTRA_2023,406 +SERC_F,2032,NERC_LTRA_2023,406 +SERC_F,2033,NERC_LTRA_2023,406 +SERC_SE,2024,NERC_LTRA_2023,-971 +SERC_SE,2025,NERC_LTRA_2023,-471 +SERC_SE,2026,NERC_LTRA_2023,-471 +SERC_SE,2027,NERC_LTRA_2023,-471 +SERC_SE,2028,NERC_LTRA_2023,-471 +SERC_SE,2029,NERC_LTRA_2023,-471 +SERC_SE,2030,NERC_LTRA_2023,-256 +SERC_SE,2031,NERC_LTRA_2023,-256 +SERC_SE,2032,NERC_LTRA_2023,-256 +SERC_SE,2033,NERC_LTRA_2023,-256 +SPP,2024,NERC_LTRA_2023,-404 +SPP,2025,NERC_LTRA_2023,-384 +SPP,2026,NERC_LTRA_2023,-364 +SPP,2027,NERC_LTRA_2023,-474 +SPP,2028,NERC_LTRA_2023,-469 +SPP,2029,NERC_LTRA_2023,-469 +SPP,2030,NERC_LTRA_2023,-400 +ERCOT,2024,NERC_LTRA_2023,20 +WECC_CA,2024,NERC_LTRA_2023,0 +WECC_CA,2025,NERC_LTRA_2023,0 +WECC_CA,2026,NERC_LTRA_2023,161 +WECC_CA,2027,NERC_LTRA_2023,338 +WECC_CA,2028,NERC_LTRA_2023,521 +WECC_CA,2029,NERC_LTRA_2023,408 +WECC_CA,2030,NERC_LTRA_2023,1339 +WECC_CA,2031,NERC_LTRA_2023,1572 +WECC_NW,2024,NERC_LTRA_2023,1157 +WECC_NW,2025,NERC_LTRA_2023,1290 +WECC_NW,2026,NERC_LTRA_2023,6785 +WECC_NW,2027,NERC_LTRA_2023,8002 +WECC_NW,2028,NERC_LTRA_2023,9826 +WECC_SW,2024,NERC_LTRA_2023,1676 +WECC_SW,2025,NERC_LTRA_2023,2316 +WECC_SW,2026,NERC_LTRA_2023,3148 +WECC_SW,2027,NERC_LTRA_2023,3824 +WECC_SW,2028,NERC_LTRA_2023,4731 +WECC_SW,2029,NERC_LTRA_2023,5324 +WECC_SW,2030,NERC_LTRA_2023,5736 diff --git a/inputs/reserves/prm_annual.csv b/inputs/reserves/prm_annual.csv index 1b0cf28..30fc852 100644 --- a/inputs/reserves/prm_annual.csv +++ b/inputs/reserves/prm_annual.csv @@ -1,311 +1,352 @@ -*nercr,t,none,static,ramp2025_20by50,ramp2025_25by50,ramp2025_30by50 -MISO,2020,0,0.183,0.183,0.183,0.183 -MISO,2021,0,0.183,0.183,0.183,0.183 -MISO,2022,0,0.183,0.183,0.183,0.183 -MISO,2023,0,0.183,0.183,0.183,0.183 -MISO,2024,0,0.183,0.183,0.183,0.183 -MISO,2025,0,0.183,0.183,0.183,0.183 -MISO,2026,0,0.183,0.18368,0.18568,0.18768 -MISO,2027,0,0.183,0.18436,0.18836,0.19236 -MISO,2028,0,0.183,0.18504,0.19104,0.19704 -MISO,2029,0,0.183,0.18572,0.19372,0.20172 -MISO,2030,0,0.183,0.1864,0.1964,0.2064 -MISO,2031,0,0.183,0.18708,0.19908,0.21108 -MISO,2032,0,0.183,0.18776,0.20176,0.21576 -MISO,2033,0,0.183,0.18844,0.20444,0.22044 -MISO,2034,0,0.183,0.18912,0.20712,0.22512 -MISO,2035,0,0.183,0.1898,0.2098,0.2298 -MISO,2036,0,0.183,0.19048,0.21248,0.23448 -MISO,2037,0,0.183,0.19116,0.21516,0.23916 -MISO,2038,0,0.183,0.19184,0.21784,0.24384 -MISO,2039,0,0.183,0.19252,0.22052,0.24852 -MISO,2040,0,0.183,0.1932,0.2232,0.2532 -MISO,2041,0,0.183,0.19388,0.22588,0.25788 -MISO,2042,0,0.183,0.19456,0.22856,0.26256 -MISO,2043,0,0.183,0.19524,0.23124,0.26724 -MISO,2044,0,0.183,0.19592,0.23392,0.27192 -MISO,2045,0,0.183,0.1966,0.2366,0.2766 -MISO,2046,0,0.183,0.19728,0.23928,0.28128 -MISO,2047,0,0.183,0.19796,0.24196,0.28596 -MISO,2048,0,0.183,0.19864,0.24464,0.29064 -MISO,2049,0,0.183,0.19932,0.24732,0.29532 -MISO,2050,0,0.183,0.2,0.25,0.3 -NPCC_NE,2020,0,0.135,0.135,0.135,0.135 
-NPCC_NE,2021,0,0.135,0.135,0.135,0.135 -NPCC_NE,2022,0,0.135,0.135,0.135,0.135 -NPCC_NE,2023,0,0.135,0.135,0.135,0.135 -NPCC_NE,2024,0,0.135,0.135,0.135,0.135 -NPCC_NE,2025,0,0.135,0.135,0.135,0.135 -NPCC_NE,2026,0,0.135,0.1376,0.1396,0.1416 -NPCC_NE,2027,0,0.135,0.1402,0.1442,0.1482 -NPCC_NE,2028,0,0.135,0.1428,0.1488,0.1548 -NPCC_NE,2029,0,0.135,0.1454,0.1534,0.1614 -NPCC_NE,2030,0,0.135,0.148,0.158,0.168 -NPCC_NE,2031,0,0.135,0.1506,0.1626,0.1746 -NPCC_NE,2032,0,0.135,0.1532,0.1672,0.1812 -NPCC_NE,2033,0,0.135,0.1558,0.1718,0.1878 -NPCC_NE,2034,0,0.135,0.1584,0.1764,0.1944 -NPCC_NE,2035,0,0.135,0.161,0.181,0.201 -NPCC_NE,2036,0,0.135,0.1636,0.1856,0.2076 -NPCC_NE,2037,0,0.135,0.1662,0.1902,0.2142 -NPCC_NE,2038,0,0.135,0.1688,0.1948,0.2208 -NPCC_NE,2039,0,0.135,0.1714,0.1994,0.2274 -NPCC_NE,2040,0,0.135,0.174,0.204,0.234 -NPCC_NE,2041,0,0.135,0.1766,0.2086,0.2406 -NPCC_NE,2042,0,0.135,0.1792,0.2132,0.2472 -NPCC_NE,2043,0,0.135,0.1818,0.2178,0.2538 -NPCC_NE,2044,0,0.135,0.1844,0.2224,0.2604 -NPCC_NE,2045,0,0.135,0.187,0.227,0.267 -NPCC_NE,2046,0,0.135,0.1896,0.2316,0.2736 -NPCC_NE,2047,0,0.135,0.1922,0.2362,0.2802 -NPCC_NE,2048,0,0.135,0.1948,0.2408,0.2868 -NPCC_NE,2049,0,0.135,0.1974,0.2454,0.2934 -NPCC_NE,2050,0,0.135,0.2,0.25,0.3 -NPCC_NY,2020,0,0.15,0.15,0.15,0.15 -NPCC_NY,2021,0,0.15,0.15,0.15,0.15 -NPCC_NY,2022,0,0.15,0.15,0.15,0.15 -NPCC_NY,2023,0,0.15,0.15,0.15,0.15 -NPCC_NY,2024,0,0.15,0.15,0.15,0.15 -NPCC_NY,2025,0,0.15,0.15,0.15,0.15 -NPCC_NY,2026,0,0.15,0.152,0.154,0.156 -NPCC_NY,2027,0,0.15,0.154,0.158,0.162 -NPCC_NY,2028,0,0.15,0.156,0.162,0.168 -NPCC_NY,2029,0,0.15,0.158,0.166,0.174 -NPCC_NY,2030,0,0.15,0.16,0.17,0.18 -NPCC_NY,2031,0,0.15,0.162,0.174,0.186 -NPCC_NY,2032,0,0.15,0.164,0.178,0.192 -NPCC_NY,2033,0,0.15,0.166,0.182,0.198 -NPCC_NY,2034,0,0.15,0.168,0.186,0.204 -NPCC_NY,2035,0,0.15,0.17,0.19,0.21 -NPCC_NY,2036,0,0.15,0.172,0.194,0.216 -NPCC_NY,2037,0,0.15,0.174,0.198,0.222 -NPCC_NY,2038,0,0.15,0.176,0.202,0.228 -NPCC_NY,2039,0,0.15,0.178,0.206,0.234 -NPCC_NY,2040,0,0.15,0.18,0.21,0.24 -NPCC_NY,2041,0,0.15,0.182,0.214,0.246 -NPCC_NY,2042,0,0.15,0.184,0.218,0.252 -NPCC_NY,2043,0,0.15,0.186,0.222,0.258 -NPCC_NY,2044,0,0.15,0.188,0.226,0.264 -NPCC_NY,2045,0,0.15,0.19,0.23,0.27 -NPCC_NY,2046,0,0.15,0.192,0.234,0.276 -NPCC_NY,2047,0,0.15,0.194,0.238,0.282 -NPCC_NY,2048,0,0.15,0.196,0.242,0.288 -NPCC_NY,2049,0,0.15,0.198,0.246,0.294 -NPCC_NY,2050,0,0.15,0.2,0.25,0.3 -PJM,2020,0,0.146,0.146,0.146,0.146 -PJM,2021,0,0.146,0.146,0.146,0.146 -PJM,2022,0,0.146,0.146,0.146,0.146 -PJM,2023,0,0.146,0.146,0.146,0.146 -PJM,2024,0,0.146,0.146,0.146,0.146 -PJM,2025,0,0.146,0.146,0.146,0.146 -PJM,2026,0,0.146,0.14816,0.15016,0.15216 -PJM,2027,0,0.146,0.15032,0.15432,0.15832 -PJM,2028,0,0.146,0.15248,0.15848,0.16448 -PJM,2029,0,0.146,0.15464,0.16264,0.17064 -PJM,2030,0,0.146,0.1568,0.1668,0.1768 -PJM,2031,0,0.146,0.15896,0.17096,0.18296 -PJM,2032,0,0.146,0.16112,0.17512,0.18912 -PJM,2033,0,0.146,0.16328,0.17928,0.19528 -PJM,2034,0,0.146,0.16544,0.18344,0.20144 -PJM,2035,0,0.146,0.1676,0.1876,0.2076 -PJM,2036,0,0.146,0.16976,0.19176,0.21376 -PJM,2037,0,0.146,0.17192,0.19592,0.21992 -PJM,2038,0,0.146,0.17408,0.20008,0.22608 -PJM,2039,0,0.146,0.17624,0.20424,0.23224 -PJM,2040,0,0.146,0.1784,0.2084,0.2384 -PJM,2041,0,0.146,0.18056,0.21256,0.24456 -PJM,2042,0,0.146,0.18272,0.21672,0.25072 -PJM,2043,0,0.146,0.18488,0.22088,0.25688 -PJM,2044,0,0.146,0.18704,0.22504,0.26304 -PJM,2045,0,0.146,0.1892,0.2292,0.2692 -PJM,2046,0,0.146,0.19136,0.23336,0.27536 
-PJM,2047,0,0.146,0.19352,0.23752,0.28152 -PJM,2048,0,0.146,0.19568,0.24168,0.28768 -PJM,2049,0,0.146,0.19784,0.24584,0.29384 -PJM,2050,0,0.146,0.2,0.25,0.3 -SERC,2020,0,0.15,0.15,0.15,0.15 -SERC,2021,0,0.15,0.15,0.15,0.15 -SERC,2022,0,0.15,0.15,0.15,0.15 -SERC,2023,0,0.15,0.15,0.15,0.15 -SERC,2024,0,0.15,0.15,0.15,0.15 -SERC,2025,0,0.15,0.15,0.15,0.15 -SERC,2026,0,0.15,0.152,0.154,0.156 -SERC,2027,0,0.15,0.154,0.158,0.162 -SERC,2028,0,0.15,0.156,0.162,0.168 -SERC,2029,0,0.15,0.158,0.166,0.174 -SERC,2030,0,0.15,0.16,0.17,0.18 -SERC,2031,0,0.15,0.162,0.174,0.186 -SERC,2032,0,0.15,0.164,0.178,0.192 -SERC,2033,0,0.15,0.166,0.182,0.198 -SERC,2034,0,0.15,0.168,0.186,0.204 -SERC,2035,0,0.15,0.17,0.19,0.21 -SERC,2036,0,0.15,0.172,0.194,0.216 -SERC,2037,0,0.15,0.174,0.198,0.222 -SERC,2038,0,0.15,0.176,0.202,0.228 -SERC,2039,0,0.15,0.178,0.206,0.234 -SERC,2040,0,0.15,0.18,0.21,0.24 -SERC,2041,0,0.15,0.182,0.214,0.246 -SERC,2042,0,0.15,0.184,0.218,0.252 -SERC,2043,0,0.15,0.186,0.222,0.258 -SERC,2044,0,0.15,0.188,0.226,0.264 -SERC,2045,0,0.15,0.19,0.23,0.27 -SERC,2046,0,0.15,0.192,0.234,0.276 -SERC,2047,0,0.15,0.194,0.238,0.282 -SERC,2048,0,0.15,0.196,0.242,0.288 -SERC,2049,0,0.15,0.198,0.246,0.294 -SERC,2050,0,0.15,0.2,0.25,0.3 -SPP,2020,0,0.16,0.16,0.16,0.16 -SPP,2021,0,0.16,0.16,0.16,0.16 -SPP,2022,0,0.16,0.16,0.16,0.16 -SPP,2023,0,0.16,0.16,0.16,0.16 -SPP,2024,0,0.16,0.16,0.16,0.16 -SPP,2025,0,0.16,0.16,0.16,0.16 -SPP,2026,0,0.16,0.1616,0.1636,0.1656 -SPP,2027,0,0.16,0.1632,0.1672,0.1712 -SPP,2028,0,0.16,0.1648,0.1708,0.1768 -SPP,2029,0,0.16,0.1664,0.1744,0.1824 -SPP,2030,0,0.16,0.168,0.178,0.188 -SPP,2031,0,0.16,0.1696,0.1816,0.1936 -SPP,2032,0,0.16,0.1712,0.1852,0.1992 -SPP,2033,0,0.16,0.1728,0.1888,0.2048 -SPP,2034,0,0.16,0.1744,0.1924,0.2104 -SPP,2035,0,0.16,0.176,0.196,0.216 -SPP,2036,0,0.16,0.1776,0.1996,0.2216 -SPP,2037,0,0.16,0.1792,0.2032,0.2272 -SPP,2038,0,0.16,0.1808,0.2068,0.2328 -SPP,2039,0,0.16,0.1824,0.2104,0.2384 -SPP,2040,0,0.16,0.184,0.214,0.244 -SPP,2041,0,0.16,0.1856,0.2176,0.2496 -SPP,2042,0,0.16,0.1872,0.2212,0.2552 -SPP,2043,0,0.16,0.1888,0.2248,0.2608 -SPP,2044,0,0.16,0.1904,0.2284,0.2664 -SPP,2045,0,0.16,0.192,0.232,0.272 -SPP,2046,0,0.16,0.1936,0.2356,0.2776 -SPP,2047,0,0.16,0.1952,0.2392,0.2832 -SPP,2048,0,0.16,0.1968,0.2428,0.2888 -SPP,2049,0,0.16,0.1984,0.2464,0.2944 -SPP,2050,0,0.16,0.2,0.25,0.3 -ERCOT,2020,0,0.1375,0.1375,0.1375,0.1375 -ERCOT,2021,0,0.1375,0.1375,0.1375,0.1375 -ERCOT,2022,0,0.1375,0.1375,0.1375,0.1375 -ERCOT,2023,0,0.1375,0.1375,0.1375,0.1375 -ERCOT,2024,0,0.1375,0.1375,0.1375,0.1375 -ERCOT,2025,0,0.1375,0.1375,0.1375,0.1375 -ERCOT,2026,0,0.1375,0.14,0.142,0.144 -ERCOT,2027,0,0.1375,0.1425,0.1465,0.1505 -ERCOT,2028,0,0.1375,0.145,0.151,0.157 -ERCOT,2029,0,0.1375,0.1475,0.1555,0.1635 -ERCOT,2030,0,0.1375,0.15,0.16,0.17 -ERCOT,2031,0,0.1375,0.1525,0.1645,0.1765 -ERCOT,2032,0,0.1375,0.155,0.169,0.183 -ERCOT,2033,0,0.1375,0.1575,0.1735,0.1895 -ERCOT,2034,0,0.1375,0.16,0.178,0.196 -ERCOT,2035,0,0.1375,0.1625,0.1825,0.2025 -ERCOT,2036,0,0.1375,0.165,0.187,0.209 -ERCOT,2037,0,0.1375,0.1675,0.1915,0.2155 -ERCOT,2038,0,0.1375,0.17,0.196,0.222 -ERCOT,2039,0,0.1375,0.1725,0.2005,0.2285 -ERCOT,2040,0,0.1375,0.175,0.205,0.235 -ERCOT,2041,0,0.1375,0.1775,0.2095,0.2415 -ERCOT,2042,0,0.1375,0.18,0.214,0.248 -ERCOT,2043,0,0.1375,0.1825,0.2185,0.2545 -ERCOT,2044,0,0.1375,0.185,0.223,0.261 -ERCOT,2045,0,0.1375,0.1875,0.2275,0.2675 -ERCOT,2046,0,0.1375,0.19,0.232,0.274 -ERCOT,2047,0,0.1375,0.1925,0.2365,0.2805 -ERCOT,2048,0,0.1375,0.195,0.241,0.287 
-ERCOT,2049,0,0.1375,0.1975,0.2455,0.2935 -ERCOT,2050,0,0.1375,0.2,0.25,0.3 -WECC_CA,2020,0,0.182,0.182,0.182,0.182 -WECC_CA,2021,0,0.182,0.182,0.182,0.182 -WECC_CA,2022,0,0.182,0.182,0.182,0.182 -WECC_CA,2023,0,0.182,0.182,0.182,0.182 -WECC_CA,2024,0,0.182,0.182,0.182,0.182 -WECC_CA,2025,0,0.182,0.182,0.182,0.182 -WECC_CA,2026,0,0.182,0.18272,0.18472,0.18672 -WECC_CA,2027,0,0.182,0.18344,0.18744,0.19144 -WECC_CA,2028,0,0.182,0.18416,0.19016,0.19616 -WECC_CA,2029,0,0.182,0.18488,0.19288,0.20088 -WECC_CA,2030,0,0.182,0.1856,0.1956,0.2056 -WECC_CA,2031,0,0.182,0.18632,0.19832,0.21032 -WECC_CA,2032,0,0.182,0.18704,0.20104,0.21504 -WECC_CA,2033,0,0.182,0.18776,0.20376,0.21976 -WECC_CA,2034,0,0.182,0.18848,0.20648,0.22448 -WECC_CA,2035,0,0.182,0.1892,0.2092,0.2292 -WECC_CA,2036,0,0.182,0.18992,0.21192,0.23392 -WECC_CA,2037,0,0.182,0.19064,0.21464,0.23864 -WECC_CA,2038,0,0.182,0.19136,0.21736,0.24336 -WECC_CA,2039,0,0.182,0.19208,0.22008,0.24808 -WECC_CA,2040,0,0.182,0.1928,0.2228,0.2528 -WECC_CA,2041,0,0.182,0.19352,0.22552,0.25752 -WECC_CA,2042,0,0.182,0.19424,0.22824,0.26224 -WECC_CA,2043,0,0.182,0.19496,0.23096,0.26696 -WECC_CA,2044,0,0.182,0.19568,0.23368,0.27168 -WECC_CA,2045,0,0.182,0.1964,0.2364,0.2764 -WECC_CA,2046,0,0.182,0.19712,0.23912,0.28112 -WECC_CA,2047,0,0.182,0.19784,0.24184,0.28584 -WECC_CA,2048,0,0.182,0.19856,0.24456,0.29056 -WECC_CA,2049,0,0.182,0.19928,0.24728,0.29528 -WECC_CA,2050,0,0.182,0.2,0.25,0.3 -WECC_NWPP,2020,0,0.1435,0.1435,0.1435,0.1435 -WECC_NWPP,2021,0,0.1435,0.1435,0.1435,0.1435 -WECC_NWPP,2022,0,0.1435,0.1435,0.1435,0.1435 -WECC_NWPP,2023,0,0.1435,0.1435,0.1435,0.1435 -WECC_NWPP,2024,0,0.1435,0.1435,0.1435,0.1435 -WECC_NWPP,2025,0,0.1435,0.1435,0.1435,0.1435 -WECC_NWPP,2026,0,0.1435,0.14576,0.14776,0.14976 -WECC_NWPP,2027,0,0.1435,0.14802,0.15202,0.15602 -WECC_NWPP,2028,0,0.1435,0.15028,0.15628,0.16228 -WECC_NWPP,2029,0,0.1435,0.15254,0.16054,0.16854 -WECC_NWPP,2030,0,0.1435,0.1548,0.1648,0.1748 -WECC_NWPP,2031,0,0.1435,0.15706,0.16906,0.18106 -WECC_NWPP,2032,0,0.1435,0.15932,0.17332,0.18732 -WECC_NWPP,2033,0,0.1435,0.16158,0.17758,0.19358 -WECC_NWPP,2034,0,0.1435,0.16384,0.18184,0.19984 -WECC_NWPP,2035,0,0.1435,0.1661,0.1861,0.2061 -WECC_NWPP,2036,0,0.1435,0.16836,0.19036,0.21236 -WECC_NWPP,2037,0,0.1435,0.17062,0.19462,0.21862 -WECC_NWPP,2038,0,0.1435,0.17288,0.19888,0.22488 -WECC_NWPP,2039,0,0.1435,0.17514,0.20314,0.23114 -WECC_NWPP,2040,0,0.1435,0.1774,0.2074,0.2374 -WECC_NWPP,2041,0,0.1435,0.17966,0.21166,0.24366 -WECC_NWPP,2042,0,0.1435,0.18192,0.21592,0.24992 -WECC_NWPP,2043,0,0.1435,0.18418,0.22018,0.25618 -WECC_NWPP,2044,0,0.1435,0.18644,0.22444,0.26244 -WECC_NWPP,2045,0,0.1435,0.1887,0.2287,0.2687 -WECC_NWPP,2046,0,0.1435,0.19096,0.23296,0.27496 -WECC_NWPP,2047,0,0.1435,0.19322,0.23722,0.28122 -WECC_NWPP,2048,0,0.1435,0.19548,0.24148,0.28748 -WECC_NWPP,2049,0,0.1435,0.19774,0.24574,0.29374 -WECC_NWPP,2050,0,0.1435,0.2,0.25,0.3 -WECC_SRSG,2020,0,0.1155,0.1155,0.1155,0.1155 -WECC_SRSG,2021,0,0.1155,0.1155,0.1155,0.1155 -WECC_SRSG,2022,0,0.1155,0.1155,0.1155,0.1155 -WECC_SRSG,2023,0,0.1155,0.1155,0.1155,0.1155 -WECC_SRSG,2024,0,0.1155,0.1155,0.1155,0.1155 -WECC_SRSG,2025,0,0.1155,0.1155,0.1155,0.1155 -WECC_SRSG,2026,0,0.1155,0.11888,0.12088,0.12288 -WECC_SRSG,2027,0,0.1155,0.12226,0.12626,0.13026 -WECC_SRSG,2028,0,0.1155,0.12564,0.13164,0.13764 -WECC_SRSG,2029,0,0.1155,0.12902,0.13702,0.14502 -WECC_SRSG,2030,0,0.1155,0.1324,0.1424,0.1524 -WECC_SRSG,2031,0,0.1155,0.13578,0.14778,0.15978 -WECC_SRSG,2032,0,0.1155,0.13916,0.15316,0.16716 
-WECC_SRSG,2033,0,0.1155,0.14254,0.15854,0.17454 -WECC_SRSG,2034,0,0.1155,0.14592,0.16392,0.18192 -WECC_SRSG,2035,0,0.1155,0.1493,0.1693,0.1893 -WECC_SRSG,2036,0,0.1155,0.15268,0.17468,0.19668 -WECC_SRSG,2037,0,0.1155,0.15606,0.18006,0.20406 -WECC_SRSG,2038,0,0.1155,0.15944,0.18544,0.21144 -WECC_SRSG,2039,0,0.1155,0.16282,0.19082,0.21882 -WECC_SRSG,2040,0,0.1155,0.1662,0.1962,0.2262 -WECC_SRSG,2041,0,0.1155,0.16958,0.20158,0.23358 -WECC_SRSG,2042,0,0.1155,0.17296,0.20696,0.24096 -WECC_SRSG,2043,0,0.1155,0.17634,0.21234,0.24834 -WECC_SRSG,2044,0,0.1155,0.17972,0.21772,0.25572 -WECC_SRSG,2045,0,0.1155,0.1831,0.2231,0.2631 -WECC_SRSG,2046,0,0.1155,0.18648,0.22848,0.27048 -WECC_SRSG,2047,0,0.1155,0.18986,0.23386,0.27786 -WECC_SRSG,2048,0,0.1155,0.19324,0.23924,0.28524 -WECC_SRSG,2049,0,0.1155,0.19662,0.24462,0.29262 -WECC_SRSG,2050,0,0.1155,0.2,0.25,0.3 +*nercr,t,none,static,nerc,ramp2025_20by50 +MISO,2024,0,0.166,0.166,0.166 +MISO,2025,0,0.166,0.172,0.167 +MISO,2026,0,0.166,0.179,0.169 +MISO,2027,0,0.166,0.182,0.170 +MISO,2028,0,0.166,0.184,0.171 +MISO,2029,0,0.166,0.196,0.173 +MISO,2030,0,0.166,0.201,0.174 +MISO,2031,0,0.166,0.207,0.175 +MISO,2032,0,0.166,0.212,0.176 +MISO,2033,0,0.166,0.212,0.178 +MISO,2034,0,0.166,0.212,0.179 +MISO,2035,0,0.166,0.212,0.180 +MISO,2036,0,0.166,0.212,0.182 +MISO,2037,0,0.166,0.212,0.183 +MISO,2038,0,0.166,0.212,0.184 +MISO,2039,0,0.166,0.212,0.186 +MISO,2040,0,0.166,0.212,0.187 +MISO,2041,0,0.166,0.212,0.188 +MISO,2042,0,0.166,0.212,0.190 +MISO,2043,0,0.166,0.212,0.191 +MISO,2044,0,0.166,0.212,0.192 +MISO,2045,0,0.166,0.212,0.193 +MISO,2046,0,0.166,0.212,0.195 +MISO,2047,0,0.166,0.212,0.196 +MISO,2048,0,0.166,0.212,0.197 +MISO,2049,0,0.166,0.212,0.199 +MISO,2050,0,0.166,0.212,0.200 +NPCC_NE,2024,0,0.129,0.129,0.129 +NPCC_NE,2025,0,0.129,0.126,0.132 +NPCC_NE,2026,0,0.129,0.11,0.134 +NPCC_NE,2027,0,0.129,0.11,0.137 +NPCC_NE,2028,0,0.129,0.11,0.140 +NPCC_NE,2029,0,0.129,0.11,0.143 +NPCC_NE,2030,0,0.129,0.1,0.145 +NPCC_NE,2031,0,0.129,0.1,0.148 +NPCC_NE,2032,0,0.129,0.11,0.151 +NPCC_NE,2033,0,0.129,0.11,0.154 +NPCC_NE,2034,0,0.129,0.11,0.156 +NPCC_NE,2035,0,0.129,0.11,0.159 +NPCC_NE,2036,0,0.129,0.11,0.162 +NPCC_NE,2037,0,0.129,0.11,0.165 +NPCC_NE,2038,0,0.129,0.11,0.167 +NPCC_NE,2039,0,0.129,0.11,0.170 +NPCC_NE,2040,0,0.129,0.11,0.173 +NPCC_NE,2041,0,0.129,0.11,0.175 +NPCC_NE,2042,0,0.129,0.11,0.178 +NPCC_NE,2043,0,0.129,0.11,0.181 +NPCC_NE,2044,0,0.129,0.11,0.184 +NPCC_NE,2045,0,0.129,0.11,0.186 +NPCC_NE,2046,0,0.129,0.11,0.189 +NPCC_NE,2047,0,0.129,0.11,0.192 +NPCC_NE,2048,0,0.129,0.11,0.195 +NPCC_NE,2049,0,0.129,0.11,0.197 +NPCC_NE,2050,0,0.129,0.11,0.200 +NPCC_NY,2024,0,0.15,0.15,0.150 +NPCC_NY,2025,0,0.15,0.15,0.152 +NPCC_NY,2026,0,0.15,0.15,0.154 +NPCC_NY,2027,0,0.15,0.15,0.156 +NPCC_NY,2028,0,0.15,0.15,0.158 +NPCC_NY,2029,0,0.15,0.15,0.160 +NPCC_NY,2030,0,0.15,0.15,0.162 +NPCC_NY,2031,0,0.15,0.15,0.163 +NPCC_NY,2032,0,0.15,0.15,0.165 +NPCC_NY,2033,0,0.15,0.15,0.167 +NPCC_NY,2034,0,0.15,0.15,0.169 +NPCC_NY,2035,0,0.15,0.15,0.171 +NPCC_NY,2036,0,0.15,0.15,0.173 +NPCC_NY,2037,0,0.15,0.15,0.175 +NPCC_NY,2038,0,0.15,0.15,0.177 +NPCC_NY,2039,0,0.15,0.15,0.179 +NPCC_NY,2040,0,0.15,0.15,0.181 +NPCC_NY,2041,0,0.15,0.15,0.183 +NPCC_NY,2042,0,0.15,0.15,0.185 +NPCC_NY,2043,0,0.15,0.15,0.187 +NPCC_NY,2044,0,0.15,0.15,0.188 +NPCC_NY,2045,0,0.15,0.15,0.190 +NPCC_NY,2046,0,0.15,0.15,0.192 +NPCC_NY,2047,0,0.15,0.15,0.194 +NPCC_NY,2048,0,0.15,0.15,0.196 +NPCC_NY,2049,0,0.15,0.15,0.198 +NPCC_NY,2050,0,0.15,0.15,0.200 +PJM,2024,0,0.148,0.148,0.148 
+PJM,2025,0,0.148,0.147,0.150 +PJM,2026,0,0.148,0.147,0.152 +PJM,2027,0,0.148,0.147,0.154 +PJM,2028,0,0.148,0.147,0.156 +PJM,2029,0,0.148,0.147,0.158 +PJM,2030,0,0.148,0.147,0.160 +PJM,2031,0,0.148,0.147,0.162 +PJM,2032,0,0.148,0.147,0.164 +PJM,2033,0,0.148,0.147,0.166 +PJM,2034,0,0.148,0.147,0.168 +PJM,2035,0,0.148,0.147,0.170 +PJM,2036,0,0.148,0.147,0.172 +PJM,2037,0,0.148,0.147,0.174 +PJM,2038,0,0.148,0.147,0.176 +PJM,2039,0,0.148,0.147,0.178 +PJM,2040,0,0.148,0.147,0.180 +PJM,2041,0,0.148,0.147,0.182 +PJM,2042,0,0.148,0.147,0.184 +PJM,2043,0,0.148,0.147,0.186 +PJM,2044,0,0.148,0.147,0.188 +PJM,2045,0,0.148,0.147,0.190 +PJM,2046,0,0.148,0.147,0.192 +PJM,2047,0,0.148,0.147,0.194 +PJM,2048,0,0.148,0.147,0.196 +PJM,2049,0,0.148,0.147,0.198 +PJM,2050,0,0.148,0.147,0.200 +SERC_E,2024,0,0.15,0.15,0.150 +SERC_E,2025,0,0.15,0.15,0.152 +SERC_E,2026,0,0.15,0.15,0.154 +SERC_E,2027,0,0.15,0.15,0.156 +SERC_E,2028,0,0.15,0.15,0.158 +SERC_E,2029,0,0.15,0.15,0.160 +SERC_E,2030,0,0.15,0.15,0.162 +SERC_E,2031,0,0.15,0.15,0.163 +SERC_E,2032,0,0.15,0.15,0.165 +SERC_E,2033,0,0.15,0.15,0.167 +SERC_E,2034,0,0.15,0.15,0.169 +SERC_E,2035,0,0.15,0.15,0.171 +SERC_E,2036,0,0.15,0.15,0.173 +SERC_E,2037,0,0.15,0.15,0.175 +SERC_E,2038,0,0.15,0.15,0.177 +SERC_E,2039,0,0.15,0.15,0.179 +SERC_E,2040,0,0.15,0.15,0.181 +SERC_E,2041,0,0.15,0.15,0.183 +SERC_E,2042,0,0.15,0.15,0.185 +SERC_E,2043,0,0.15,0.15,0.187 +SERC_E,2044,0,0.15,0.15,0.188 +SERC_E,2045,0,0.15,0.15,0.190 +SERC_E,2046,0,0.15,0.15,0.192 +SERC_E,2047,0,0.15,0.15,0.194 +SERC_E,2048,0,0.15,0.15,0.196 +SERC_E,2049,0,0.15,0.15,0.198 +SERC_E,2050,0,0.15,0.15,0.200 +SERC_C,2024,0,0.15,0.15,0.150 +SERC_C,2025,0,0.15,0.15,0.152 +SERC_C,2026,0,0.15,0.15,0.154 +SERC_C,2027,0,0.15,0.15,0.156 +SERC_C,2028,0,0.15,0.15,0.158 +SERC_C,2029,0,0.15,0.15,0.160 +SERC_C,2030,0,0.15,0.15,0.162 +SERC_C,2031,0,0.15,0.15,0.163 +SERC_C,2032,0,0.15,0.15,0.165 +SERC_C,2033,0,0.15,0.15,0.167 +SERC_C,2034,0,0.15,0.15,0.169 +SERC_C,2035,0,0.15,0.15,0.171 +SERC_C,2036,0,0.15,0.15,0.173 +SERC_C,2037,0,0.15,0.15,0.175 +SERC_C,2038,0,0.15,0.15,0.177 +SERC_C,2039,0,0.15,0.15,0.179 +SERC_C,2040,0,0.15,0.15,0.181 +SERC_C,2041,0,0.15,0.15,0.183 +SERC_C,2042,0,0.15,0.15,0.185 +SERC_C,2043,0,0.15,0.15,0.187 +SERC_C,2044,0,0.15,0.15,0.188 +SERC_C,2045,0,0.15,0.15,0.190 +SERC_C,2046,0,0.15,0.15,0.192 +SERC_C,2047,0,0.15,0.15,0.194 +SERC_C,2048,0,0.15,0.15,0.196 +SERC_C,2049,0,0.15,0.15,0.198 +SERC_C,2050,0,0.15,0.15,0.200 +SERC_F,2024,0,0.15,0.15,0.150 +SERC_F,2025,0,0.15,0.15,0.152 +SERC_F,2026,0,0.15,0.15,0.154 +SERC_F,2027,0,0.15,0.15,0.156 +SERC_F,2028,0,0.15,0.15,0.158 +SERC_F,2029,0,0.15,0.15,0.160 +SERC_F,2030,0,0.15,0.15,0.162 +SERC_F,2031,0,0.15,0.15,0.163 +SERC_F,2032,0,0.15,0.15,0.165 +SERC_F,2033,0,0.15,0.15,0.167 +SERC_F,2034,0,0.15,0.15,0.169 +SERC_F,2035,0,0.15,0.15,0.171 +SERC_F,2036,0,0.15,0.15,0.173 +SERC_F,2037,0,0.15,0.15,0.175 +SERC_F,2038,0,0.15,0.15,0.177 +SERC_F,2039,0,0.15,0.15,0.179 +SERC_F,2040,0,0.15,0.15,0.181 +SERC_F,2041,0,0.15,0.15,0.183 +SERC_F,2042,0,0.15,0.15,0.185 +SERC_F,2043,0,0.15,0.15,0.187 +SERC_F,2044,0,0.15,0.15,0.188 +SERC_F,2045,0,0.15,0.15,0.190 +SERC_F,2046,0,0.15,0.15,0.192 +SERC_F,2047,0,0.15,0.15,0.194 +SERC_F,2048,0,0.15,0.15,0.196 +SERC_F,2049,0,0.15,0.15,0.198 +SERC_F,2050,0,0.15,0.15,0.200 +SERC_SE,2024,0,0.15,0.15,0.150 +SERC_SE,2025,0,0.15,0.15,0.152 +SERC_SE,2026,0,0.15,0.15,0.154 +SERC_SE,2027,0,0.15,0.15,0.156 +SERC_SE,2028,0,0.15,0.15,0.158 +SERC_SE,2029,0,0.15,0.15,0.160 +SERC_SE,2030,0,0.15,0.15,0.162 +SERC_SE,2031,0,0.15,0.15,0.163 
+SERC_SE,2032,0,0.15,0.15,0.165 +SERC_SE,2033,0,0.15,0.15,0.167 +SERC_SE,2034,0,0.15,0.15,0.169 +SERC_SE,2035,0,0.15,0.15,0.171 +SERC_SE,2036,0,0.15,0.15,0.173 +SERC_SE,2037,0,0.15,0.15,0.175 +SERC_SE,2038,0,0.15,0.15,0.177 +SERC_SE,2039,0,0.15,0.15,0.179 +SERC_SE,2040,0,0.15,0.15,0.181 +SERC_SE,2041,0,0.15,0.15,0.183 +SERC_SE,2042,0,0.15,0.15,0.185 +SERC_SE,2043,0,0.15,0.15,0.187 +SERC_SE,2044,0,0.15,0.15,0.188 +SERC_SE,2045,0,0.15,0.15,0.190 +SERC_SE,2046,0,0.15,0.15,0.192 +SERC_SE,2047,0,0.15,0.15,0.194 +SERC_SE,2048,0,0.15,0.15,0.196 +SERC_SE,2049,0,0.15,0.15,0.198 +SERC_SE,2050,0,0.15,0.15,0.200 +SPP,2024,0,0.19,0.19,0.190 +SPP,2025,0,0.19,0.19,0.190 +SPP,2026,0,0.19,0.19,0.191 +SPP,2027,0,0.19,0.19,0.191 +SPP,2028,0,0.19,0.19,0.192 +SPP,2029,0,0.19,0.19,0.192 +SPP,2030,0,0.19,0.19,0.192 +SPP,2031,0,0.19,0.19,0.193 +SPP,2032,0,0.19,0.19,0.193 +SPP,2033,0,0.19,0.19,0.193 +SPP,2034,0,0.19,0.19,0.194 +SPP,2035,0,0.19,0.19,0.194 +SPP,2036,0,0.19,0.19,0.195 +SPP,2037,0,0.19,0.19,0.195 +SPP,2038,0,0.19,0.19,0.195 +SPP,2039,0,0.19,0.19,0.196 +SPP,2040,0,0.19,0.19,0.196 +SPP,2041,0,0.19,0.19,0.197 +SPP,2042,0,0.19,0.19,0.197 +SPP,2043,0,0.19,0.19,0.197 +SPP,2044,0,0.19,0.19,0.198 +SPP,2045,0,0.19,0.19,0.198 +SPP,2046,0,0.19,0.19,0.198 +SPP,2047,0,0.19,0.19,0.199 +SPP,2048,0,0.19,0.19,0.199 +SPP,2049,0,0.19,0.19,0.200 +SPP,2050,0,0.19,0.19,0.200 +ERCOT,2024,0,0.1375,0.1375,0.138 +ERCOT,2025,0,0.1375,0.1375,0.140 +ERCOT,2026,0,0.1375,0.1375,0.142 +ERCOT,2027,0,0.1375,0.1375,0.145 +ERCOT,2028,0,0.1375,0.1375,0.147 +ERCOT,2029,0,0.1375,0.1375,0.150 +ERCOT,2030,0,0.1375,0.1375,0.152 +ERCOT,2031,0,0.1375,0.1375,0.154 +ERCOT,2032,0,0.1375,0.1375,0.157 +ERCOT,2033,0,0.1375,0.1375,0.159 +ERCOT,2034,0,0.1375,0.1375,0.162 +ERCOT,2035,0,0.1375,0.1375,0.164 +ERCOT,2036,0,0.1375,0.1375,0.166 +ERCOT,2037,0,0.1375,0.1375,0.169 +ERCOT,2038,0,0.1375,0.1375,0.171 +ERCOT,2039,0,0.1375,0.1375,0.174 +ERCOT,2040,0,0.1375,0.1375,0.176 +ERCOT,2041,0,0.1375,0.1375,0.178 +ERCOT,2042,0,0.1375,0.1375,0.181 +ERCOT,2043,0,0.1375,0.1375,0.183 +ERCOT,2044,0,0.1375,0.1375,0.186 +ERCOT,2045,0,0.1375,0.1375,0.188 +ERCOT,2046,0,0.1375,0.1375,0.190 +ERCOT,2047,0,0.1375,0.1375,0.193 +ERCOT,2048,0,0.1375,0.1375,0.195 +ERCOT,2049,0,0.1375,0.1375,0.198 +ERCOT,2050,0,0.1375,0.1375,0.200 +WECC_CA,2024,0,0.1864,0.1864,0.186 +WECC_CA,2025,0,0.1864,0.1854,0.187 +WECC_CA,2026,0,0.1864,0.1842,0.187 +WECC_CA,2027,0,0.1864,0.1826,0.188 +WECC_CA,2028,0,0.1864,0.1528,0.188 +WECC_CA,2029,0,0.1864,0.1781,0.189 +WECC_CA,2030,0,0.1864,0.1758,0.190 +WECC_CA,2031,0,0.1864,0.1734,0.190 +WECC_CA,2032,0,0.1864,0.1717,0.191 +WECC_CA,2033,0,0.1864,0.1701,0.191 +WECC_CA,2034,0,0.1864,0.1701,0.192 +WECC_CA,2035,0,0.1864,0.1701,0.192 +WECC_CA,2036,0,0.1864,0.1701,0.193 +WECC_CA,2037,0,0.1864,0.1701,0.193 +WECC_CA,2038,0,0.1864,0.1701,0.194 +WECC_CA,2039,0,0.1864,0.1701,0.194 +WECC_CA,2040,0,0.1864,0.1701,0.195 +WECC_CA,2041,0,0.1864,0.1701,0.195 +WECC_CA,2042,0,0.1864,0.1701,0.196 +WECC_CA,2043,0,0.1864,0.1701,0.196 +WECC_CA,2044,0,0.1864,0.1701,0.197 +WECC_CA,2045,0,0.1864,0.1701,0.197 +WECC_CA,2046,0,0.1864,0.1701,0.198 +WECC_CA,2047,0,0.1864,0.1701,0.198 +WECC_CA,2048,0,0.1864,0.1701,0.199 +WECC_CA,2049,0,0.1864,0.1701,0.199 +WECC_CA,2050,0,0.1864,0.1701,0.200 +WECC_NW,2024,0,0.189,0.189,0.189 +WECC_NW,2025,0,0.189,0.189,0.189 +WECC_NW,2026,0,0.189,0.176,0.190 +WECC_NW,2027,0,0.189,0.176,0.190 +WECC_NW,2028,0,0.189,0.174,0.191 +WECC_NW,2029,0,0.189,0.168,0.191 +WECC_NW,2030,0,0.189,0.165,0.192 +WECC_NW,2031,0,0.189,0.164,0.192 
+WECC_NW,2032,0,0.189,0.165,0.192 +WECC_NW,2033,0,0.189,0.163,0.193 +WECC_NW,2034,0,0.189,0.163,0.193 +WECC_NW,2035,0,0.189,0.163,0.194 +WECC_NW,2036,0,0.189,0.163,0.194 +WECC_NW,2037,0,0.189,0.163,0.195 +WECC_NW,2038,0,0.189,0.163,0.195 +WECC_NW,2039,0,0.189,0.163,0.195 +WECC_NW,2040,0,0.189,0.163,0.196 +WECC_NW,2041,0,0.189,0.163,0.196 +WECC_NW,2042,0,0.189,0.163,0.197 +WECC_NW,2043,0,0.189,0.163,0.197 +WECC_NW,2044,0,0.189,0.163,0.197 +WECC_NW,2045,0,0.189,0.163,0.198 +WECC_NW,2046,0,0.189,0.163,0.198 +WECC_NW,2047,0,0.189,0.163,0.199 +WECC_NW,2048,0,0.189,0.163,0.199 +WECC_NW,2049,0,0.189,0.163,0.200 +WECC_NW,2050,0,0.189,0.163,0.200 +WECC_SW,2024,0,0.131,0.131,0.131 +WECC_SW,2025,0,0.131,0.134,0.134 +WECC_SW,2026,0,0.131,0.131,0.136 +WECC_SW,2027,0,0.131,0.128,0.139 +WECC_SW,2028,0,0.131,0.113,0.142 +WECC_SW,2029,0,0.131,0.123,0.144 +WECC_SW,2030,0,0.131,0.122,0.147 +WECC_SW,2031,0,0.131,0.122,0.150 +WECC_SW,2032,0,0.131,0.12,0.152 +WECC_SW,2033,0,0.131,0.117,0.155 +WECC_SW,2034,0,0.131,0.117,0.158 +WECC_SW,2035,0,0.131,0.117,0.160 +WECC_SW,2036,0,0.131,0.117,0.163 +WECC_SW,2037,0,0.131,0.117,0.166 +WECC_SW,2038,0,0.131,0.117,0.168 +WECC_SW,2039,0,0.131,0.117,0.171 +WECC_SW,2040,0,0.131,0.117,0.173 +WECC_SW,2041,0,0.131,0.117,0.176 +WECC_SW,2042,0,0.131,0.117,0.179 +WECC_SW,2043,0,0.131,0.117,0.181 +WECC_SW,2044,0,0.131,0.117,0.184 +WECC_SW,2045,0,0.131,0.117,0.187 +WECC_SW,2046,0,0.131,0.117,0.189 +WECC_SW,2047,0,0.131,0.117,0.192 +WECC_SW,2048,0,0.131,0.117,0.195 +WECC_SW,2049,0,0.131,0.117,0.197 +WECC_SW,2050,0,0.131,0.117,0.200 diff --git a/inputs/scalars.csv b/inputs/scalars.csv index 454e829..be1bfc7 100644 --- a/inputs/scalars.csv +++ b/inputs/scalars.csv @@ -62,7 +62,6 @@ smr_capture_rate,0.9,"--fraction-- capture rate of CO2 for SMR with CCS" smr_co2_intensity,9.83,"--tonnes CO2 / tonnes H2-- emissions rate for SMR H2 production. The median CO2 emission normalized for SMR hydrogen production was 9 kg CO2/kg H2 production, or 75 g CO2/MJ H2 (using H2 low heating value [LHV]). The median emission is similar with the value of 9.26 kg CO2/kg H2 in GREET 2018, which was based on the H2A modeling by Rutkowski et al (2012). (https://greet.es.anl.gov/publication-smr_h2_2019) Actual emissions value of 9.83 and 90% capture rate based on a 2011 NETL study (DOE/NETL-2011/1434)" state_cap_start_yr,2014,"--year-- state co2 cap start year" storage_eff_psh,0.8,"--fraction-- round-trip efficiency of PSH" -storage_lifetime_cost_adjust_pvb,1.24,"--fraction-- The CRF_15 divided by CRF_20 to account for batteries only having a 15-year lifetime. It technically should change over time (if CRF changes over time), but is represented as a constant value here for simplicity" storage_vom_min,0.01,--2004$/MWh-- minimum VOM for storage technologies (included to avoid degeneracy with VRE curtailment) szn_adj_gas_winter,0.054,"--fraction-- seasonal adjustment for gas prices. 
calculated based on natural gas futures prices -- see documentation" topfuelbinwidth,2,"top fuel bin width" diff --git a/inputs/state_policies/recstyle.csv b/inputs/state_policies/recstyle.csv index c244604..3683cc6 100644 --- a/inputs/state_policies/recstyle.csv +++ b/inputs/state_policies/recstyle.csv @@ -1,2 +1,3 @@ *st,RPSCat,style RI,RPS_All,1 +RI,CES,1 diff --git a/inputs/supplycurvedata/rev_paths.csv b/inputs/supplycurvedata/rev_paths.csv index c801ecb..56637f6 100644 --- a/inputs/supplycurvedata/rev_paths.csv +++ b/inputs/supplycurvedata/rev_paths.csv @@ -1,11 +1,11 @@ -tech,access_switch,access_case,sc_path,rev_path,sc_file,hpc_sc_file,original_rev_folder,cf_path -upv,GSw_SitingUPV,reference,UPV/2023_06_06_Update,UPV/2023_06_06_Update/reV/01_reference,UPV/2023_06_06_Update/reV/post_processed_supply_curves/01_reference_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/UPV/2023_06_06_Update/reV/post_processed_supply_curves/01_reference_supply-curve.csv,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/aggregation,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/generation/generation_multi-year.h5 -upv,GSw_SitingUPV,open,UPV/2023_06_06_Update,UPV/2023_06_06_Update/reV/00_open,UPV/2023_06_06_Update/reV/post_processed_supply_curves/00_open_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/UPV/2023_06_06_Update/reV/0_moderate_open/post_processed_supply_curves/00_open_supply-curve.csv,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/aggregation,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/generation/generation_multi-year.h5 -upv,GSw_SitingUPV,limited,UPV/2023_06_06_Update,UPV/2023_06_06_Update/reV/02_limited,UPV/2023_06_06_Update/reV/post_processed_supply_curves/02_limited_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/UPV/2023_06_06_Update/reV/post_processed_supply_curves/02_limited_supply-curve.csv,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/aggregation,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/generation/generation_multi-year.h5 -wind-ofs,GSw_SitingWindOfs,open,OFFSHORE/2023_03_31_Update,OFFSHORE/2023_03_31_Update/reV/0_open_moderate,OFFSHORE/2023_03_31_Update/reV/0_open_moderate/0_open_moderate_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/OFFSHORE/2023_03_31_Update/reV/0_open_moderate/0_open_moderate_supply-curve.csv,,/shared-projects/rev/projects/weto/fy22/standard_scenarios/rev/generation/4_moderate_offshore/4_moderate_offshore_multi-year.h5 -wind-ofs,GSw_SitingWindOfs,limited,OFFSHORE/2023_03_31_Update,OFFSHORE/2023_03_31_Update/reV/1_limited_moderate,OFFSHORE/2023_03_31_Update/reV/1_limited_moderate/1_limited_moderate_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/OFFSHORE/2023_03_31_Update/reV/1_limited_moderate/1_limited_moderate_supply-curve.csv,,/shared-projects/rev/projects/weto/fy22/standard_scenarios/rev/generation/4_moderate_offshore/4_moderate_offshore_multi-year.h5 
-wind-ons,GSw_SitingWindOns,open,ONSHORE/2023_07_28_Update,ONSHORE/2023_07_28_Update/reV/open_2030_moderate_115hh_170rd,ONSHORE/2023_07_28_Update/reV/processed_tables/open_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/ONSHORE/2023_07_28_Update/reV/processed_tables/open_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/corrections,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/open_2030_moderate_115hh_170rd/open_2030_moderate_115hh_170rd_bespoke.h5
-wind-ons,GSw_SitingWindOns,reference,ONSHORE/2023_07_28_Update,ONSHORE/2023_07_28_Update/reV/reference_2030_moderate_115hh_170rd,ONSHORE/2023_07_28_Update/reV/processed_tables/reference_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/ONSHORE/2023_07_28_Update/reV/processed_tables/reference_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/corrections,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/reference_2030_moderate_115hh_170rd/reference_2030_moderate_115hh_170rd_bespoke.h5
-wind-ons,GSw_SitingWindOns,limited,ONSHORE/2023_07_28_Update,ONSHORE/2023_07_28_Update/reV/limited_2030_moderate_115hh_170rd,ONSHORE/2023_07_28_Update/reV/processed_tables/limited_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/reeds/Supply_Curve_Data/ONSHORE/2023_07_28_Update/reV/processed_tables/limited_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/corrections,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/limited_2030_moderate_115hh_170rd/limited_2030_moderate_115hh_170rd_bespoke.h5
-dupv,none,none,DUPV/2018_Update,DUPV/2018_Update,,,,
-csp,none,none,CSP/2019_Existing,CSP/2019_Existing,,,,
+tech,access_switch,access_case,sc_path,rev_path,sc_file,original_rev_folder,cf_path
+upv,GSw_SitingUPV,reference,UPV/2023_06_06_Update,UPV/2023_06_06_Update/reV/01_reference,UPV/2023_06_06_Update/reV/post_processed_supply_curves/01_reference_supply-curve.csv,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/aggregation,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/generation/generation_multi-year.h5
+upv,GSw_SitingUPV,open,UPV/2023_06_06_Update,UPV/2023_06_06_Update/reV/00_open,UPV/2023_06_06_Update/reV/post_processed_supply_curves/00_open_supply-curve.csv,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/aggregation,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/generation/generation_multi-year.h5
+upv,GSw_SitingUPV,limited,UPV/2023_06_06_Update,UPV/2023_06_06_Update/reV/02_limited,UPV/2023_06_06_Update/reV/post_processed_supply_curves/02_limited_supply-curve.csv,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/aggregation,/shared-projects/rev/projects/seto/fy23/rev/standard_scenarios/generation/generation_multi-year.h5
+wind-ofs,GSw_SitingWindOfs,open,OFFSHORE/2023_03_31_Update,OFFSHORE/2023_03_31_Update/reV/0_open_moderate,OFFSHORE/2023_03_31_Update/reV/0_open_moderate/0_open_moderate_supply-curve.csv,,/shared-projects/rev/projects/weto/fy22/standard_scenarios/rev/generation/4_moderate_offshore/4_moderate_offshore_multi-year.h5
+wind-ofs,GSw_SitingWindOfs,limited,OFFSHORE/2023_03_31_Update,OFFSHORE/2023_03_31_Update/reV/1_limited_moderate,OFFSHORE/2023_03_31_Update/reV/1_limited_moderate/1_limited_moderate_supply-curve.csv,,/shared-projects/rev/projects/weto/fy22/standard_scenarios/rev/generation/4_moderate_offshore/4_moderate_offshore_multi-year.h5 +wind-ons,GSw_SitingWindOns,open,ONSHORE/2023_07_28_Update,ONSHORE/2023_07_28_Update/reV/open_2030_moderate_115hh_170rd,ONSHORE/2023_07_28_Update/reV/processed_tables/open_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/corrections,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/open_2030_moderate_115hh_170rd/open_2030_moderate_115hh_170rd_bespoke.h5 +wind-ons,GSw_SitingWindOns,reference,ONSHORE/2023_07_28_Update,ONSHORE/2023_07_28_Update/reV/reference_2030_moderate_115hh_170rd,ONSHORE/2023_07_28_Update/reV/processed_tables/reference_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/corrections,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/reference_2030_moderate_115hh_170rd/reference_2030_moderate_115hh_170rd_bespoke.h5 +wind-ons,GSw_SitingWindOns,limited,ONSHORE/2023_07_28_Update,ONSHORE/2023_07_28_Update/reV/limited_2030_moderate_115hh_170rd,ONSHORE/2023_07_28_Update/reV/processed_tables/limited_2030_moderate_115hh_170rd_supply-curve.csv,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/corrections,/shared-projects/rev/projects/weto/fy23/atb/rev/standard_scenarios/limited_2030_moderate_115hh_170rd/limited_2030_moderate_115hh_170rd_bespoke.h5 +dupv,none,none,DUPV/2018_Update,DUPV/2018_Update,,, +csp,none,none,CSP/2019_Existing,CSP/2019_Existing,,, \ No newline at end of file diff --git a/inputs/variability/multi_year/upv_140AC_ba-reference.h5 b/inputs/variability/multi_year/upv_140AC-reference_ba.h5 similarity index 100% rename from inputs/variability/multi_year/upv_140AC_ba-reference.h5 rename to inputs/variability/multi_year/upv_140AC-reference_ba.h5 diff --git a/inputs/variability/multi_year/upv_220AC_ba-reference.h5 b/inputs/variability/multi_year/upv_220AC-reference_ba.h5 similarity index 100% rename from inputs/variability/multi_year/upv_220AC_ba-reference.h5 rename to inputs/variability/multi_year/upv_220AC-reference_ba.h5 diff --git a/postprocessing/bokehpivot/in/reeds2/trtype_style.csv b/postprocessing/bokehpivot/in/reeds2/trtype_style.csv index 95fae5b..d01bf02 100644 --- a/postprocessing/bokehpivot/in/reeds2/trtype_style.csv +++ b/postprocessing/bokehpivot/in/reeds2/trtype_style.csv @@ -5,5 +5,5 @@ AC,#2CA02C B2B,#9467BD "DC, LCC",#FF7F0E "DC, VSC",#D62728 -Reinforcement,#636363 +Reinforcement,#969696 Spur,#BDBDBD diff --git a/postprocessing/compare_casegroup.py b/postprocessing/compare_casegroup.py deleted file mode 100644 index f4e9b15..0000000 --- a/postprocessing/compare_casegroup.py +++ /dev/null @@ -1,1331 +0,0 @@ -#%% Imports -import numpy as np -import pandas as pd -import matplotlib as mpl -import matplotlib.pyplot as plt -from matplotlib import patheffects as pe -import os -import sys -import io -import argparse -import site -import subprocess as sp -import platform -from glob import glob -from tqdm import tqdm -import traceback -import cmocean -import pptx -from pptx.util import Inches, Pt - -reeds_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -remotepath = '/Volumes/ReEDS/' if sys.platform == 'darwin' else r'//nrelnas01/ReEDS/' - -### Format plots 
and load other convenience functions -site.addsitedir(os.path.join(reeds_path,'postprocessing')) -import plots -import reedsplots -plots.plotparams() - -#%% Argument inputs -parser = argparse.ArgumentParser( - description='Compare multiple ReEDS cases', - formatter_class=argparse.ArgumentDefaultsHelpFormatter, -) -parser.add_argument( - 'caselist', type=str, - help=('comma-delimited list of cases to plot, OR shared casename prefix, ' - 'OR csv file of cases. The first case is treated as the base case ' - 'unless a different one is provided via the --basecase/-b argument.')) -parser.add_argument( - '--casenames', '-n', type=str, default='', - help='comma-delimited list of shorter case names to use in plots') -parser.add_argument( - '--titleshorten', '-s', type=str, default='', - help='characters to cut from start of case name (only used if no casenames)') -parser.add_argument( - '--startyear', '-t', type=int, default=2020, - help='First year to show') -parser.add_argument( - '--sharey', '-y', action='store_true', - help='Use same y-axis scale for absolute and difference plots') -parser.add_argument( - '--basecase', '-b', type=str, default='', - help='Substring of case path to use as default (if empty, uses first case in list)') -parser.add_argument( - '--level', '-l', type=str, default='transreg', - choices=['interconnect','nercr','transreg','transgrp','st'], - help='hierarchy level at which to plot regional results') -parser.add_argument( - '--skipbp', '-p', action='store_true', - help='flag to prevent bokehpivot report from being generated') -parser.add_argument( - '--bpreport', '-r', type=str, default='standard_report_reduced', - help='which bokehpivot report to generate') - -args = parser.parse_args() -_caselist = args.caselist.split(',') -_casenames = args.casenames -try: - titleshorten = int(args.titleshorten) -except ValueError: - titleshorten = len(args.titleshorten) -_basecase = args.basecase -startyear = args.startyear -sharey = True if args.sharey else 'row' -level = args.level -bpreport = args.bpreport -skipbp = args.skipbp -interactive = False - -#%% Inputs for testing -# reeds_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -# _caselist = [os.path.join(reeds_path,'postprocessing','comparegroup_20240207_single.csv')] -# _caselist = [os.path.join(reeds_path,'postprocessing','comparegroup_20240207_small2large.csv')] -# _casenames = '' -# titleshorten = 0 -# startyear = 2020 -# sharey = 'row' -# _basecase = '' -# level = 'transreg' -# skipbp = True -# bpreport = 'standard_report_reduced' -# interactive = True - - -#%%### Fixed inputs -cmap = cmocean.cm.rain -cmap_diff = plt.cm.RdBu_r - -techmap = { - **{f'upv_{i}':'Utility PV' for i in range(20)}, - **{f'dupv_{i}':'Utility PV' for i in range(20)}, - **{f'wind-ons_{i}':'Land-based wind' for i in range(20)}, - **{f'wind-ofs_{i}':'Offshore wind' for i in range(20)}, - **dict(zip(['nuclear','nuclear-smr'], ['Nuclear']*20)), - **dict(zip( - ['gas-cc_re-cc','gas-ct_re-ct','re-cc','re-ct', - 'gas-cc_h2-ct','gas-ct_h2-ct','h2-cc','h2-ct',], - ['H2 turbine']*20)), - **{f'battery_{i}':'Battery/PSH' for i in range(20)}, **{'pumped-hydro':'Battery/PSH'}, - **dict(zip( - ['coal-igcc', 'coaloldscr', 'coalolduns', 'gas-cc', 'gas-ct', 'coal-new', 'o-g-s',], - ['Fossil']*20)), - **dict(zip( - ['gas-cc_gas-cc-ccs_mod','gas-cc_gas-cc-ccs_max','gas-cc-ccs_mod','gas-cc-ccs_max', - 'gas-cc_gas-cc-ccs_mod','coal-igcc_coal-ccs_mod','coal-new_coal-ccs_mod', - 'coaloldscr_coal-ccs_mod','coalolduns_coal-ccs_mod','cofirenew_coal-ccs_mod', 
- 'cofireold_coal-ccs_mod','gas-cc_gas-cc-ccs_max','coal-igcc_coal-ccs_max', - 'coal-new_coal-ccs_max','coaloldscr_coal-ccs_max','coalolduns_coal-ccs_max', - 'cofirenew_coal-ccs_max','cofireold_coal-ccs_max',], - ['Fossil+CCS']*50)), - **dict(zip(['dac','beccs_mod','beccs_max'],['CO2 removal']*20)), -} - -maptechs = [ - 'Utility PV', - 'Land-based wind', - 'Offshore wind', - 'Nuclear', - 'H2 turbine', - 'Battery/PSH', - 'Fossil+CCS', -] - - -#%%### Functions -def add_to_pptx( - title=None, file=None, left=0, top=0.62, width=13.33, height=None, - verbose=1, slide=None, - ): - """Add current matplotlib figure (or file if specified) to new powerpoint slide""" - if not file: - image = io.BytesIO() - plt.savefig(image, format='png') - else: - image = file - if not os.path.exists(image): - raise FileNotFoundError(image) - - if slide is None: - slide = prs.slides.add_slide(blank_slide_layout) - slide.shapes.title.text = title - slide.shapes.add_picture( - image, - left=(None if left is None else Inches(left)), - top=(None if top is None else Inches(top)), - width=(None if width is None else Inches(width)), - height=(None if height is None else Inches(height)), - ) - if verbose: - print(title) - return slide - - -def add_textbox( - text, slide, - left=0, top=7.2, width=13.33, height=0.3, - fontsize=14, - ): - """Add a textbox to the specified slide""" - textbox = slide.shapes.add_textbox( - left=(None if left is None else Inches(left)), - top=(None if top is None else Inches(top)), - width=(None if width is None else Inches(width)), - height=(None if height is None else Inches(height)), - ) - p = textbox.text_frame.paragraphs[0] - run = p.add_run() - run.text = text - font = run.font - font.size = Pt(fontsize) - return slide - - -#%%### Procedure -#%% Parse arguments -use_table_casenames = False -if len(_caselist) == 1: - ## If it's a .csv, read the cases to compare - if _caselist[0].endswith('.csv'): - dfcase = pd.read_csv(_caselist[0], header=None) - ## First check it's a simple csv with one case per row - if dfcase.shape[1] == 1: - caselist = dfcase[0].tolist() - ## Then check if it's a 2-column csv with [casepath,casename] header - elif ( - (dfcase.shape[1] == 2) - and (dfcase.loc[0,[0,1]].tolist() == ['casepath', 'casename']) - ): - dfcase.columns = ['casepath','casename'] - dfcase.drop(0, inplace=True) - ## Drop cases that haven't finished yet - dfcase = dfcase.loc[ - dfcase.casepath.map( - lambda x: os.path.isfile(os.path.join(x,'outputs','reeds-report','report.xlsx'))) - ].copy() - caselist = dfcase.casepath.tolist() - use_table_casenames = True - ## Otherwise assume it's a copy of a cases_{batchname}.csv file in a case folder - ## This approach is less robust; the others are preferred. 
- else: - prefix_plus_tail = os.path.dirname(_caselist[0]) - tails = [i for i in dfcase.iloc[0] if i not in ['Default Value',np.nan]] - prefix = prefix_plus_tail[:-len([i for i in tails if prefix_plus_tail.endswith(i)][0])] - caselist = [prefix+i for i in tails] - ## Otherwise look for all runs starting with the provided string - else: - caselist = sorted(glob(_caselist[0]+'*')) - ## If no titleshorten is provided, use the provided prefix - if not titleshorten: - titleshorten = len(os.path.basename(args.caselist)) -else: - caselist = _caselist - -## Remove cases that haven't finished yet -caselist = [ - i for i in caselist - if os.path.isfile(os.path.join(i,'outputs','reeds-report','report.xlsx')) -] - -if use_table_casenames: - casenames = dfcase.casename.tolist() -else: - casenames = ( - _casenames.split(',') if len(_casenames) - else [os.path.basename(c)[titleshorten:] for c in caselist] - ) - -if len(caselist) != len(casenames): - err = ( - f"len(caselist) = {len(caselist)} but len(casenames) = {len(casenames)}\n\n" - 'caselist:\n' + '\n'.join(caselist) + '\n\n' - 'casenames:\n' + '\n'.join(casenames) + '\n' - ) - raise ValueError(err) - -cases = dict(zip(casenames, caselist)) -maxlength = max([len(c) for c in cases]) - -if not len(_basecase): - basecase = list(cases.keys())[0] -else: - basepath = [c for c in cases.values() if c.endswith(_basecase)] - if len(basepath) == 0: - err = ( - f"Use a basecase that matches one case.\nbasecase={_basecase} matches none of:\n" - + '\n'.join(basepath) - ) - raise ValueError(err) - elif len(basepath) > 1: - err = ( - f"Use a basecase that only matches one case.\nbasecase={_basecase} matches:\n" - + '\n'.join(basepath) - ) - raise ValueError(err) - else: - basepath = basepath[0] - ## basecase is the short name; basepath is the full path - basecase = casenames[caselist.index(basepath)] - ## Put it first in the list - cases = {**{basecase:cases[basecase]}, **{k:v for k,v in cases.items() if k != basecase}} - -for case, path in cases.items(): - print(f'{path} -> {case}' + (' (base)' if case == basecase else '')) - -colors = plots.rainbowmapper(cases) - -#%% Create output folder -outpath = os.path.join(cases[basecase], 'outputs', 'comparisons') -os.makedirs(outpath, exist_ok=True) -## clip name to max length and removing disallowed characters -max_filename_length = os.pathconf(os.sep,'PC_NAME_MAX') -savename = os.path.join( - outpath, - (f"results-{','.join(cases.keys())}" - .replace('/','').replace(' ','') - [:max_filename_length-len('.pptx')]) + '.pptx' -) -print(f'Saving results to {savename}') - -#%% Create bokehpivot report as subprocess -if not skipbp: - start_str = 'start ' if platform.system() == 'Windows' else '' - bp_path = f'{reeds_path}/postprocessing/bokehpivot' - bp_py_file = f'{bp_path}/reports/interface_report_model.py' - report_path = f'{bp_path}/reports/templates/reeds2/{bpreport}.py' - bp_outpath = f'{outpath}/{bpreport}-diff-multicase' - add_diff = 'Yes' - auto_open = 'Yes' - bp_colors = pd.read_csv(f'{bp_path}/in/example_reeds_scenarios.csv')['color'].tolist() - bp_colors = bp_colors*10 #Up to 200 scenarios - bp_colors = bp_colors[:len(casenames)] - df_scenarios = pd.DataFrame({'name':casenames, 'color':bp_colors, 'path':caselist}) - scenarios_path = f'{outpath}/scenarios.csv' - df_scenarios.to_csv(scenarios_path, index=False) - call_str = ( - f'{start_str}python "{bp_py_file}" "ReEDS 2.0" "{scenarios_path}" all ' + - f'{add_diff} "{basecase}" "{report_path}" "html,excel" one "{bp_outpath}" {auto_open}' - ) - sp.Popen(call_str, 
shell=True) - -#%%### Load data -#%% Shared -## Determine if we're on a branch before or after county-level capability was merged -countyreeds = ( - True if os.path.isfile(os.path.join(reeds_path,'inputs','transmission','r_rr_adj_county.csv')) - else False -) -if countyreeds: - hierarchy = pd.read_csv( - os.path.join(reeds_path,'inputs','hierarchy.csv') - ).drop(['*county','county_name'], axis=1).drop_duplicates().rename(columns={'ba':'r'}).set_index('r') -else: - hierarchy = pd.read_csv( - os.path.join(reeds_path,'inputs','hierarchy.csv') - ).rename(columns={'*r':'r'}).set_index('r') -hierarchy = hierarchy.loc[hierarchy.country.str.lower()=='usa'].copy() - -sw = pd.read_csv( - os.path.join(cases[case],'inputs_case','switches.csv'), - header=None, index_col=0).squeeze(1) - -scalars = pd.read_csv( - os.path.join(cases[case], 'inputs_case', 'scalars.csv'), - header=None, usecols=[0,1], index_col=0).squeeze(1) -phaseout_trigger = float(scalars.co2_emissions_2022) * float(sw.GSw_TCPhaseout_trigger_f) - -#%% Colors -bokehcostcolors = pd.read_csv( - os.path.join( - reeds_path,'postprocessing','bokehpivot','in','reeds2','cost_cat_style.csv'), - index_col='order').squeeze(1) -bokehcostcolors = bokehcostcolors.loc[~bokehcostcolors.index.duplicated()] - -colors_time = pd.read_csv( - os.path.join( - reeds_path,'postprocessing','bokehpivot','in','reeds2','process_style.csv'), - index_col='order', -).squeeze(1) - -bokehcolors = pd.read_csv( - os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_style.csv'), - index_col='order').squeeze(1) - -tech_map = pd.read_csv( - os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_map.csv'), - index_col='raw').squeeze(1) - -bokehcolors = pd.concat([ - bokehcolors.loc['smr':'electrolyzer'], - pd.Series('#D55E00', index=['dac'], name='color'), - bokehcolors.loc[:'Canada'], -]) - -bokehcolors['canada'] = bokehcolors['Canada'] - -techcolors = { - 'gas-cc_gas-cc-ccs':bokehcolors['gas-cc-ccs_mod'], - 'cofire':bokehcolors['biopower'], - 'gas-cc':'#5E1688', - 'gas-cc-ccs':'#9467BD', -} -for i in bokehcolors.index: - if i in techcolors: - pass - elif i in bokehcolors.index: - techcolors[i] = bokehcolors[i] - else: - raise Exception(i) - -techcolors = {i: techcolors[i] for i in bokehcolors.index} - -trtype_map = pd.read_csv( - os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','trtype_map.csv'), - index_col='raw')['display'] -colors_trans = pd.read_csv( - os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','trtype_style.csv'), - index_col='order')['color'] - -#%% Parse excel report sheet names -val2sheet = reedsplots.get_report_sheetmap(cases[basecase]) - -#%% National capacity and generation -renametechs = { - 'h2-cc_upgrade':'h2-cc', - 'h2-ct_upgrade':'h2-ct', - 'gas-cc-ccs_mod_upgrade':'gas-cc-ccs_mod', - 'coal-ccs_mod_upgrade':'coal-ccs_mod', -} -dictin_cap = {} -for case in tqdm(cases, desc='national capacity'): - dictin_cap[case] = pd.read_excel( - os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), - sheet_name=val2sheet['Capacity (GW)'], - ).drop('scenario',axis=1) - ### Simplify techs - dictin_cap[case].tech = dictin_cap[case].tech.map(lambda x: renametechs.get(x,x)) - dictin_cap[case] = dictin_cap[case].groupby(['tech','year'], as_index=False)['Capacity (GW)'].sum() - dictin_cap[case] = dictin_cap[case].loc[ - ~dictin_cap[case].tech.isin(['electrolyzer','smr','smr-ccs'])].copy() - -dictin_gen = {} -for case in tqdm(cases, desc='national generation'): - dictin_gen[case] = 
pd.read_excel( - os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), - sheet_name=val2sheet['Generation (TWh)'], - ).drop('scenario',axis=1) - ### Simplify techs - dictin_gen[case].tech = dictin_gen[case].tech.map(lambda x: renametechs.get(x,x)) - dictin_gen[case] = dictin_gen[case].groupby(['tech','year'], as_index=False)['Generation (TWh)'].sum() - -costcat_rename = { - 'CO2 Spurline':'CO2 T&S Capex', - 'CO2 Pipeline':'CO2 T&S Capex', - 'CO2 Storage':'CO2 T&S Capex', - 'CO2 Spurline FOM':'CO2 T&S O&M', - 'CO2 Pipeline FOM':'CO2 T&S O&M', - 'CO2 Incentive Payments':'CCS Incentives', - 'Capital': 'Gen & Stor Capex', - 'O&M': 'Gen & Stor O&M', - 'CO2 Network':'CO2 T&S Capex', - 'CO2 Incentives':'CCS Incentives', - 'CO2 FOM':'CO2 T&S O&M', - 'CO2 Capture':'CO2 T&S Capex', - 'H2 Fuel':'Fuel', - 'H2 VOM':'H2 Prod O&M', -} -dictin_npv = {} -for case in tqdm(cases, desc='NPV of system cost'): - try: - dictin_npv[case] = pd.read_excel( - os.path.join(cases[case],'outputs','reeds-report/report.xlsx'), - sheet_name=val2sheet['Present Value of System Cost'], engine='openpyxl', - ).drop('scenario',axis=1).set_index('cost_cat')['Discounted Cost (Bil $)'] - dictin_npv[case].index = pd.Series(dictin_npv[case].index).replace(costcat_rename) - dictin_npv[case] = dictin_npv[case].groupby(level=0, sort=False).sum() - except FileNotFoundError: - print(case) - -dictin_scoe = {} -for case in tqdm(cases, desc='SCOE'): - try: - dictin_scoe[case] = pd.read_excel( - os.path.join(cases[case],'outputs','reeds-report/report.xlsx'), - sheet_name=val2sheet['National Average Electricity'], engine='openpyxl', - ).drop('scenario',axis=1) - except FileNotFoundError: - print(case) - dictin_scoe[case].cost_cat = dictin_scoe[case].cost_cat.replace( - {**costcat_rename,**{'CO2 Incentives':'CCS Incentives'}}) - dictin_scoe[case] = ( - dictin_scoe[case].groupby(['cost_cat','year'], sort=False, as_index=False) - ['Average cost ($/MWh)'].sum()) - -pollutant = 'CO2' -dictin_emissions = {} -for case in tqdm(cases, desc='national emissions'): - try: - dictin_emissions[case] = pd.read_csv( - os.path.join(cases[case], 'outputs', 'emit_nat.csv'), - header=0, names=['e','t','tonne'], index_col=['e','t'], - ### Convert to MMT - ).squeeze(1).loc[pollutant] / 1e6 - except FileNotFoundError: - print(case) - -dictin_trans = {} -for case in tqdm(cases, desc='national transmission'): - try: - dictin_trans[case] = pd.read_excel( - os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), - sheet_name=val2sheet['Transmission (GW-mi)'], engine='openpyxl', - ).drop('scenario',axis=1) - except FileNotFoundError: - print(case) - -dictin_trans_r = {} -for case in tqdm(cases, desc='regional transmission'): - dictin_trans_r[case] = pd.read_csv( - os.path.join(cases[case],'outputs','tran_out.csv') - ).rename(columns={'Value':'MW'}) - for _level in ['interconnect','transreg','transgrp','st']: - dictin_trans_r[case][f'inter_{_level}'] = ( - dictin_trans_r[case].r.map(hierarchy[_level]) - != dictin_trans_r[case].rr.map(hierarchy[_level]) - ).astype(int) - -dictin_cap_r = {} -for case in tqdm(cases, desc='regional capacity'): - dictin_cap_r[case] = pd.read_csv( - os.path.join(cases[case],'outputs','cap.csv'), - names=['i','r','t','MW'], header=0, - ) - ### Simplify techs - dictin_cap_r[case].i = dictin_cap_r[case].i.map(lambda x: renametechs.get(x,x)) - dictin_cap_r[case].i = dictin_cap_r[case].i.str.lower().map(lambda x: techmap.get(x,x)) - dictin_cap_r[case] = dictin_cap_r[case].groupby(['i','r','t'], as_index=False).MW.sum() - 
-dictin_runtime = {} -for case in tqdm(cases, desc='runtime'): - try: - dictin_runtime[case] = pd.read_excel( - os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), - sheet_name=val2sheet['Runtime (hours)'], engine='openpyxl', - ).set_index('process').processtime - except FileNotFoundError: - print(case) - -dictin_neue = {} -for case in tqdm(cases, desc='NEUE'): - try: - dictin_neue[case] = ( - pd.read_csv(os.path.join(cases[case],'outputs','neue.csv')) - .sort_values(['t','iteration']) - .drop_duplicates(subset='t', keep='last') - .set_index('t')['NEUE [ppm]'] - ) - except FileNotFoundError: - pass - -### Model years -years = sorted(dictin_cap[case].year.astype(int).unique()) -years = [y for y in years if y >= startyear] -yearstep = years[-1] - years[-2] -lastyear = max(years) -## Years for which to add data notes -startyear_sums = 2023 -allyears = range(startyear_sums,lastyear+1) -noteyears = [2035, 2050] -if all([lastyear < y for y in noteyears]): - noteyears = [lastyear] -startyear_growth = 2035 - - -#%%### Plots ###### -### Set up powerpoint file -prs = pptx.Presentation(os.path.join(reeds_path,'postprocessing','template.pptx')) -blank_slide_layout = prs.slide_layouts[3] - - -#%%### Generation capacity lines -aggtechsplot = { - 'Interregional\ntransmission': 'inter_transreg', - 'Land-based\nwind': ['wind-ons'], - 'Offshore\nwind': ['wind-ofs'], - # 'Wind': ['wind-ons', 'wind-ofs'], - 'Solar': ['upv', 'dupv', 'distpv', 'csp', 'pvb'], - 'Battery': ['battery_{}'.format(i) for i in [2,4,6,8,10]], - 'Pumped\nstorage\nhydro': ['pumped-hydro'], - # 'Storage': ['battery_{}'.format(i) for i in [2,4,6,8,10]] + ['pumped-hydro'], - 'Hydro, geo, bio': [ - 'hydro','geothermal', - 'biopower','lfill-gas','cofire','beccs_mod','beccs' - ], - 'Nuclear': ['nuclear', 'nuclear-smr'], - 'Hydrogen\nturbine': ['h2-cc', 'h2-cc-upgrade', 'h2-ct', 'h2-ct-upgrade'], - 'Gas CCS': ['gas-cc-ccs_mod'], - 'Coal CCS': ['coal-ccs_mod'], - # 'Fossil\n(with CCS)': ['gas-cc-ccs_mod','coal-ccs_mod'], - 'Fossil\n(w/o CCS)': ['gas-cc', 'gas-ct', 'o-g-s', 'coal', 'cofire'], -# 'CDR': ['dac', 'beccs'], -# 'H2 production': ['smr', 'smr-ccs', 'electrolyzer'], -} -checktechs = [i for sublist in aggtechsplot.values() for i in sublist] -alltechs = pd.concat(dictin_cap).tech.unique() -printstring = ( - 'The following techs are not plotted: ' - + ', '.join([c for c in alltechs if c not in checktechs]) -) - -offsetstart = { - 'Solar': (15,0), - 'Wind': (15,0), 'Land-based\nwind': (15,0), -} - -val = '4_Capacity (GW)' -ycol = 'Capacity (GW)' -nrows, ncols = 2, len(aggtechsplot)//2+len(aggtechsplot)%2 -coords = dict(zip( - list(aggtechsplot.keys()), - [(row,col) for row in range(nrows) for col in range(ncols)] -)) - -offset = dict() - -plt.close() -f,ax = plt.subplots( - nrows, ncols, sharex=True, sharey=True, - figsize=(13.33, 6.88), - gridspec_kw={'wspace':0.3, 'hspace':0.15}, -) -for tech in aggtechsplot: - for case in cases: - ### Central cases - if 'transmission' in tech.lower(): - df = dictin_trans_r[case].loc[ - dictin_trans_r[case][aggtechsplot[tech]]==1 - ].groupby('t').MW.sum() / 1e3 - else: - df = dictin_cap[case].loc[ - dictin_cap[case].tech.isin(aggtechsplot[tech]) - ].groupby('year')[ycol].sum().reindex(years).fillna(0) - ax[coords[tech]].plot( - df.index, df.values, - label=case, color=colors[case], ls='-', - ) - ### Annotate the last value - fincap = df.reindex([lastyear]).fillna(0).squeeze() - ax[coords[tech]].annotate( - ' {:.0f}'.format(fincap), - (lastyear, fincap+offset.get((tech,case),0)), - ha='left', 
va='center', - color=colors[case], fontsize='small', - annotation_clip=False, - ) - - ### Formatting - ax[coords[tech]].xaxis.set_minor_locator(mpl.ticker.MultipleLocator(5 if lastyear>2040 else 1)) - ax[coords[tech]].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10 if lastyear>2040 else 5)) - ax[coords[tech]].annotate( - tech, - (0.05,1.0), va='top', ha='left', - xycoords='axes fraction', - fontsize='x-large', weight='bold',) - ### Annotate the 2020 value - plots.annotate( - ax[coords[tech]], basecase, - startyear, offsetstart.get(tech,(10,10)), color='C7', - arrowprops={'arrowstyle':'-|>', 'color':'C7'}) -if len(aggtechsplot) % 2: - ax[-1,-1].axis('off') -handles, labels = ax[-1,0].get_legend_handles_labels() -leg = ax[-1,0].legend( - handles, labels, - fontsize='large', frameon=False, - loc='upper left', bbox_to_anchor=(0,0.95), - handletextpad=0.3, handlelength=0.7, - ncol=1, -) -for legobj in leg.legend_handles: - legobj.set_linewidth(8) - legobj.set_solid_capstyle('butt') -ax[coords[list(aggtechsplot.keys())[0]]].set_xlim(startyear,lastyear) -ax[coords[list(aggtechsplot.keys())[0]]].set_ylim(0) -ax[coords[list(aggtechsplot.keys())[0]]].set_ylabel('Capacity [GW]', y=-0.075) -# for row in range(nrows): -# ax[row,0].set_ylabel('Capacity [GW]') -ax[coords[list(aggtechsplot.keys())[0]]].set_ylim(0) - -plots.despine(ax) -plt.draw() -plots.shorten_years(ax[1,0]) -### Save it -slide = add_to_pptx('Capacity') -add_textbox(printstring, slide) -if interactive: - print(printstring) - plt.show() - - -#%%### Capacity and generation bars -toplot = { - 'Capacity': {'data': dictin_cap, 'values':'Capacity (GW)', 'label':'Capacity [GW]'}, - 'Generation': {'data': dictin_gen, 'values':'Generation (TWh)', 'label':'Generation [TWh]'}, -} -dfbase = {} -for slidetitle, data in toplot.items(): - plt.close() - f,ax = plt.subplots( - 2, len(cases), figsize=(13.33, 6.8), - sharex=True, sharey=sharey, dpi=None, - ) - ax[0,0].set_ylabel(data['label'], y=-0.075) - ax[0,0].set_xlim(2017.5, lastyear+2.5) - ax[1,0].annotate( - f'Diff\nfrom\n{basecase}', (0.03,0.03), xycoords='axes fraction', - fontsize='x-large', weight='bold') - ###### Absolute - alltechs = set() - for col, case in enumerate(cases): - if case not in data['data']: - continue - dfplot = data['data'][case].pivot(index='year', columns='tech', values=data['values']) - dfplot = ( - dfplot[[c for c in bokehcolors.index if c in dfplot]] - .round(3).replace(0,np.nan) - .dropna(axis=1, how='all') - ) - if case == basecase: - dfbase[slidetitle] = dfplot.copy() - alltechs.update(dfplot.columns) - plots.stackbar(df=dfplot, ax=ax[0,col], colors=techcolors, width=yearstep, net=False) - ax[0,col].set_title( - case.replace('__','\n'), - fontsize=14, weight='bold', x=0, ha='left', pad=8,) - ax[0,col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) - ax[0,col].xaxis.set_minor_locator(mpl.ticker.MultipleLocator(5)) - - - ### Legend - handles = [ - mpl.patches.Patch(facecolor=techcolors[i], edgecolor='none', label=i.replace('Canada','imports')) - for i in techcolors if i in alltechs - ] - leg = ax[0,-1].legend( - handles=handles[::-1], loc='upper left', bbox_to_anchor=(1.0,1.0), - fontsize='medium', ncol=1, frameon=False, - handletextpad=0.3, handlelength=0.7, columnspacing=0.5, - ) - - ###### Difference - for col, case in enumerate(cases): - ax[1,col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) - ax[1,col].xaxis.set_minor_locator(mpl.ticker.MultipleLocator(5)) - ax[1,col].axhline(0,c='k',ls='--',lw=0.75) - - if (case not in data['data']) or 
(case == basecase): - continue - dfplot = data['data'][case].pivot(index='year', columns='tech', values=data['values']) - dfplot = ( - dfplot - .round(3).replace(0,np.nan) - .dropna(axis=1, how='all') - ) - dfplot = dfplot.subtract(dfbase[slidetitle], fill_value=0) - dfplot = dfplot[[c for c in bokehcolors.index if c in dfplot]].copy() - alltechs.update(dfplot.columns) - plots.stackbar(df=dfplot, ax=ax[1,col], colors=techcolors, width=yearstep, net=True) - - plots.despine(ax) - plt.draw() - plots.shorten_years(ax[1,0]) - ### Save it - slide = add_to_pptx(slidetitle+' stack') - if interactive: - plt.show() - - -#%%### Hodgepodge 1: NPV, final capacity, final generation, runtime -plt.close() -f,ax = plt.subplots( - 2, 4, figsize=(13.33, 6.88), sharex=True, - sharey=('col' if (sharey is True) else False), -) -handles = {} -### NPV of system cost -ax[0,0].set_ylabel('NPV of system cost [$B]', y=-0.075) -ax[0,0].axhline(0, c='k', ls='--', lw=0.75) -dfplot = pd.concat(dictin_npv, axis=1).T -dfplot = dfplot[[c for c in bokehcostcolors.index if c in dfplot]].copy() -## Absolute and difference -for (row, df) in enumerate([dfplot, dfplot - dfplot.loc[basecase]]): - plots.stackbar(df=df, ax=ax[row,0], colors=bokehcostcolors, net=True, width=0.8) - ymin, ymax = ax[row,0].get_ylim() - ypad = (ymax - ymin) * 0.02 - ## label net value - for x, case in enumerate(df.index): - val = df.loc[case].sum() - ax[row,0].annotate( - f'{val:.0f}', (x, val - ypad), ha='center', va='top', color='k', size=9, - path_effects=[pe.withStroke(linewidth=2.0, foreground='w', alpha=0.5)], - ) -## Legend info -handles['NPV'] = [ - mpl.patches.Patch(facecolor=bokehcostcolors[i], edgecolor='none', label=i) - for i in bokehcostcolors.index if i in dfplot -] - -### Final capacity and generation -toplot = { - 'Capacity': {'data': dictin_cap, 'values':'Capacity (GW)', 'label':f' {lastyear} Capacity [GW]'}, - 'Generation': {'data': dictin_gen, 'values':'Generation (TWh)', 'label':f' {lastyear} Generation [TWh]'}, -} -ax[0,2].axhline(0, c='k', ls='--', lw=0.75) -for col, (datum, data) in enumerate(toplot.items()): - ax[0,col+1].set_ylabel(data['label'], y=-0.075) - dfplot = pd.concat( - {case: - data['data'][case].loc[data['data'][case].year==lastyear] - .set_index('tech')[data['values']] - for case in cases}, - axis=1, - ).T - dfplot = ( - dfplot[[c for c in bokehcolors.index if c in dfplot]] - .round(3).replace(0,np.nan).dropna(axis=1, how='all') - ) - ## Absolute and difference - for (row, df) in enumerate([dfplot, dfplot - dfplot.loc[basecase]]): - plots.stackbar(df=df, ax=ax[row,col+1], colors=techcolors, width=0.8, net=row) - ## Legend info - handles[datum] = [ - mpl.patches.Patch(facecolor=techcolors[i], edgecolor='none', label=i) - for i in techcolors if i in dfplot - ] - -### Runtime -ax[0,3].set_ylabel('Runtime [hours]', y=-0.075) -dfplot = pd.concat(dictin_runtime, axis=1).T -dfplot = dfplot[[c for c in colors_time.index if c in dfplot]].copy() -for (row, df) in enumerate([dfplot, dfplot - dfplot.loc[basecase]]): - plots.stackbar(df=df, ax=ax[row,3], colors=colors_time, width=0.8, net=row) -## Legend info -handles['Runtime'] = [ - mpl.patches.Patch(facecolor=colors_time[i], edgecolor='none', label=i) - for i in colors_time.index if i in dfplot -] - -### Formatting -for col in range(4): - ax[1,col].set_xticks(range(len(cases))) - ax[1,col].set_xticklabels(cases.keys(), rotation=90) - ax[1,col].annotate('Diff', (0.03,0.03), xycoords='axes fraction', fontsize='large') - ax[1,col].axhline(0, c='k', ls='--', lw=0.75) 
-plt.tight_layout() -plots.despine(ax) -plt.draw() -### Save it -slide = add_to_pptx('Cost, capacity, generation, runtime') -if interactive: - plt.show() - -### Add legends as separate figure below the slide -plt.close() -f,ax = plt.subplots(1, 4, figsize=(13.33, 0.1)) -for col, datum in enumerate(handles): - leg = ax[col].legend( - handles=handles[datum][::-1], loc='upper center', bbox_to_anchor=(0.5,1.0), - fontsize='medium', ncol=1, frameon=False, - handletextpad=0.3, handlelength=0.7, columnspacing=0.5, - ) - ax[col].axis('off') -add_to_pptx(slide=slide, top=7.5) - - -#%%### Hodgepodge 2: SCOE, CO2 emissions, NEUE -plt.close() -f,ax = plt.subplots(1, 4, figsize=(11, 4.5), gridspec_kw={'wspace':0.6}) - -### SCOE -for case in cases: - df = dictin_scoe[case].groupby('year')['Average cost ($/MWh)'].sum().loc[years] - ax[0].plot(df.index, df.values, label=case, color=colors[case]) - ## annotate the last value - val = df.loc[max(years)] - ax[0].annotate( - f' {val:.0f}', - (max(years), val), ha='left', va='center', - color=colors[case], fontsize='medium', - ) -ax[0].set_ylim(0) -ax[0].set_ylabel('System cost of electricity [$/MWh]') - -### CO2 emissions -note = [] -for case in cases: - df = dictin_emissions[case].reindex(years).fillna(0) - df_allyears = df.reindex(allyears).interpolate('linear') - ax[1].plot(df.index, df.values, label=case, color=colors[case]) - ## annotate the last value - val = np.around(df.loc[max(years)], 0) + 0 - ax[1].annotate( - f' {val:.0f}', - (max(years), val), ha='left', va='center', - color=colors[case], fontsize='medium', - ) - ## collect more notes - note.append( - f"{case:<{maxlength}}:" - + ','.join([f" {df_allyears[y]:.0f} MMT {y}" for y in noteyears]) - + f"; {df_allyears.sum()/1e3:.2f} GT {startyear_sums}–{lastyear}" - ) -ax[1].set_ylim(0) -ax[1].axhline(phaseout_trigger, c='C7', ls='--', lw=0.75) -ax[1].set_ylabel(f'{pollutant} emissions [MMT/yr]') -## Notes -ax[1].annotate( - '\n'.join(note), (-0.2, -0.1), xycoords='axes fraction', va='top', - annotation_clip=False, fontsize=9, fontfamily='monospace', -) - -### NEUE -if len(dictin_neue): - for case in cases: - if case in dictin_neue: - df = dictin_neue[case].reindex([y for y in years if y >= 2025]) - ax[2].plot(df.index, df.values, label=case, color=colors[case]) - ## annotate the last value - val = np.around(df.loc[max(years)], 0) + 0 - ax[2].annotate( - f' {val:.0f}', - (max(years), val), ha='left', va='center', - color=colors[case], fontsize='medium', - ) - ax[2].set_ylim(0) - if ax[2].get_ylim()[1] >= 10: - ax[2].axhline(10, c='C7', ls='--', lw=0.75) - ax[2].set_ylabel('NEUE [ppm]') -else: - ax[2].axis('off') - -### Spare -ax[3].axis('off') - -### Legend -leg = ax[0].legend( - loc='upper left', bbox_to_anchor=(-0.3,-0.05), frameon=False, fontsize='large', - handletextpad=0.3, handlelength=0.7, -) -for legobj in leg.legend_handles: - legobj.set_linewidth(8) - legobj.set_solid_capstyle('butt') - -### Formatting -# plt.tight_layout() -plots.despine(ax) -plt.draw() -for col in range(3): - ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) - ax[col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) - plots.shorten_years(ax[col]) -### Save it -slide = add_to_pptx('Cost, emissions, reliability') -if interactive: - plt.show() - - -#%%### Transmission -plt.close() -f,ax = plt.subplots(1, 4, figsize=(11, 4.5), gridspec_kw={'wspace':0.6}) - -### Transmission TW-miles over time -for case in cases: - df = dictin_trans[case].groupby('year').sum()['Amount (GW-mi)'].reindex(years) / 1e3 - 
ax[0].plot(df.index, df.values, label=case, color=colors[case]) - ## annotate the last value - val = np.around(df.loc[max(years)], 0) + 0 - ax[0].annotate( - f' {val:.0f}', - (max(years), val), ha='left', va='center', - color=colors[case], fontsize='medium', - ) -ax[0].set_ylim(0) -ax[0].set_ylabel('Transmission capacity [TW-mi]') - -### Disaggregated transmission (for next two plots) -dftrans = pd.concat({ - case: - dictin_trans[case].groupby(['year','trtype'])['Amount (GW-mi)'].sum() - .unstack('trtype').reindex(allyears).interpolate('linear') - / 1e3 - for case in cases -}, axis=1) - -### Disaggregated final year transmission capacity -df = dftrans.loc[lastyear].unstack('trtype') -plots.stackbar(df=df, ax=ax[1], colors=colors_trans, width=0.8, net=False) -ax[1].set_ylabel('Transmission capacity [TW-mi]') - -### Transmission growth -df = ( - (dftrans.loc[lastyear] - dftrans.loc[startyear_growth]) - / (lastyear - startyear_growth) -).unstack('trtype') -plots.stackbar(df=df, ax=ax[2], colors=colors_trans, width=0.8, net=False) -ax[2].set_ylabel(f'Transmission growth,\n{startyear_growth}–{lastyear} [TWmi/year]') -## Scales -ymax = ax[2].get_ylim()[1] -scales = { - 0.73: 'Max since 2014 (345+ kV)', - 1.83: 'Max since 2014 (all kV)', - 3.64: 'Max since 2009 (all kV)', - 1476 * 6.3 / 1e3: '1× Rio Madeira per year', -} -for y, label in scales.items(): - if y > ymax: - continue - ax[2].annotate( - label, xy=(len(cases), y), xytext=(len(cases)*1.15, y), annotation_clip=False, - arrowprops={'arrowstyle':'-|>', 'color':'k'}, - ha='left', va='center', color='k', fontsize=11, - ) - ax[2].axhline( - y, c='k', lw=0.5, ls='--', - path_effects=[pe.withStroke(linewidth=1.5, foreground='w', alpha=0.5)]) - -### Spare -ax[3].axis('off') - -### Legends -## Traces -_h, _l = ax[0].get_legend_handles_labels() -leg = ax[0].legend( - # _h[::-1], _l[::-1], - loc='upper left', bbox_to_anchor=(-0.4,-0.05), frameon=False, fontsize='large', - handletextpad=0.3, handlelength=0.7, -) -for legobj in leg.legend_handles: - legobj.set_linewidth(8) - legobj.set_solid_capstyle('butt') -## Transmission types -handles = [ - mpl.patches.Patch(facecolor=colors_trans[i], edgecolor='none', label=i) - for i in colors_trans.index if i in dftrans.columns.get_level_values('trtype') -] -leg = ax[2].legend( - handles=handles[::-1], - loc='upper left', bbox_to_anchor=(1,-0.05), frameon=False, fontsize='large', - handletextpad=0.3, handlelength=0.7, -) - -### Formatting -for col in [1,2]: - ax[col].set_xticks(range(len(cases))) - ax[col].set_xticklabels(cases.keys(), rotation=90) - ax[col].yaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(5)) - -plots.despine(ax) -plt.draw() -for col in [0]: - ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) - ax[col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) - plots.shorten_years(ax[col]) -### Save it -slide = add_to_pptx('Transmission') -if interactive: - plt.show() - - -#%%### All-in-one maps -for case in cases: - try: - slide = add_to_pptx( - case, - file=os.path.join( - cases[case],'outputs','maps', - f'map_gencap_transcap-{lastyear}-sideplots.png'), - width=None, height=6.88, - ) - except FileNotFoundError: - print(f'No all-in-one map for {os.path.basename(cases[case])}') - - -#%%### Generation capacity maps -### Capacity maps -nrows, ncols, coords = plots.get_coordinates(cases, aspect=2) -if (nrows == 1) or (ncols == 1): - legendcoords = max(nrows, ncols) - 1 -elif (nrows-1, ncols-1) in coords.values(): - legendcoords = (nrows-1, ncols-1) -else: - legendcoords = 
(nrows-2, ncols-1) - -### Shared data -base = cases[list(cases.keys())[0]] -val_r = dictin_cap_r[basecase].r.unique() -dfba = reedsplots.get_zonemap(base).loc[val_r] -dfstates = dfba.dissolve('st') - -### Set up plot -for tech in maptechs: - ### Get limits - vmin = 0. - vmax = float(pd.concat({ - case: dictin_cap_r[case].loc[ - (dictin_cap_r[case].i==tech) - & (dictin_cap_r[case].t.astype(int)==lastyear) - ].groupby('r').MW.sum() - for case in cases - }).max()) / 1e3 - if np.isnan(vmax): - vmax = 0. - if not vmax: - print(f'{tech} has zero capacity in {lastyear}, so skipping maps') - continue - ### Set up plot - plt.close() - f,ax = plt.subplots( - nrows, ncols, figsize=(13.33, 6.88), - gridspec_kw={'wspace':0.0,'hspace':-0.1}, - ) - ### Plot it - for case in cases: - dfval = dictin_cap_r[case].loc[ - (dictin_cap_r[case].i==tech) - & (dictin_cap_r[case].t.astype(int)==lastyear) - ].groupby('r').MW.sum() - dfplot = dfba.copy() - dfplot['GW'] = (dfval / 1e3).fillna(0) - - ax[coords[case]].set_title(case) - dfba.plot( - ax=ax[coords[case]], - facecolor='none', edgecolor='k', lw=0.1, zorder=10000) - dfstates.plot( - ax=ax[coords[case]], - facecolor='none', edgecolor='k', lw=0.2, zorder=10001) - dfplot.plot( - ax=ax[coords[case]], column='GW', cmap=cmap, vmin=vmin, vmax=vmax, - legend=False, - ) - ## Legend - if coords[case] == legendcoords: - plots.addcolorbarhist( - f=f, ax0=ax[coords[case]], data=dfplot.GW.values, - title=f'{tech} {lastyear}\ncapacity [GW]', cmap=cmap, vmin=vmin, vmax=vmax, - orientation='horizontal', labelpad=2.25, histratio=0., - cbarwidth=0.05, cbarheight=0.85, - cbarbottom=-0.05, cbarhoffset=0., - ) - - for row in range(nrows): - for col in range(ncols): - if nrows == 1: - ax[col].axis('off') - elif ncols == 1: - ax[row].axis('off') - else: - ax[row,col].axis('off') - ### Save it - slide = add_to_pptx(f'{tech} capacity {lastyear} [GW]') - if interactive: - plt.show() - -#%% Difference maps -### Set up plot -for tech in maptechs: - ### Get limits - dfval = pd.concat({ - case: dictin_cap_r[case].loc[ - (dictin_cap_r[case].i==tech) - & (dictin_cap_r[case].t.astype(int)==lastyear) - ].groupby('r').MW.sum() - for case in cases - }, axis=1).fillna(0) / 1e3 - dfdiff = dfval.subtract(dfval[basecase], axis=0) - ### Get colorbar limits - absmax = dfval.stack().max() - diffmax = dfdiff.unstack().abs().max() - - if np.isnan(absmax): - absmax = 0. 
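# dfval holds last-year capacity by zone (GW) for each case and dfdiff its change
# relative to the base case. The base-case panel below is drawn on an absolute scale
# [0, absmax] with cmap, while every other panel shows the difference on a symmetric
# scale [-diffmax, +diffmax] with the diverging cmap_diff, so zero change sits at the
# center of the colormap. absmax is NaN only when none of the cases have any capacity
# of this tech, which is why it is reset to 0 above and the tech is then skipped below.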
- if not absmax: - print(f'{tech} has zero capacity in {lastyear}, so skipping maps') - continue - ### Set up plot - plt.close() - f,ax = plt.subplots( - nrows, ncols, figsize=(13.33, 6.88), - gridspec_kw={'wspace':0.0,'hspace':-0.1}, - ) - ### Plot it - for case in cases: - dfplot = dfba.copy() - dfplot['GW'] = dfval[case] if case == basecase else dfdiff[case] - - ax[coords[case]].set_title(case) - dfba.plot( - ax=ax[coords[case]], - facecolor='none', edgecolor='k', lw=0.1, zorder=10000) - dfstates.plot( - ax=ax[coords[case]], - facecolor='none', edgecolor='k', lw=0.2, zorder=10001) - dfplot.plot( - ax=ax[coords[case]], column='GW', - cmap=(cmap if case == basecase else cmap_diff), - vmin=(0 if case == basecase else -diffmax), - vmax=(absmax if case == basecase else diffmax), - legend=False, - ) - ## Difference legend - if coords[case] == legendcoords: - plots.addcolorbarhist( - f=f, ax0=ax[coords[case]], data=dfplot.GW.values, - title=f'{tech} {lastyear}\ncapacity, difference\nfrom {basecase} [GW]', - cmap=(cmap if case == basecase else cmap_diff), - vmin=(0 if case == basecase else -diffmax), - vmax=(absmax if case == basecase else diffmax), - orientation='horizontal', labelpad=2.25, histratio=0., - cbarwidth=0.05, cbarheight=0.85, - cbarbottom=-0.05, cbarhoffset=0., - ) - ## Absolute legend - plots.addcolorbarhist( - f=f, ax0=ax[coords[basecase]], data=dfval[basecase].values, - title=f'{tech} {lastyear}\ncapacity [GW]', - cmap=cmap, vmin=0, vmax=absmax, - orientation='horizontal', labelpad=2.25, histratio=0., - cbarwidth=0.05, cbarheight=0.85, - cbarbottom=-0.05, cbarhoffset=0., - ) - - for row in range(nrows): - for col in range(ncols): - if nrows == 1: - ax[col].axis('off') - elif ncols == 1: - ax[row].axis('off') - else: - ax[row,col].axis('off') - ### Save it - slide = add_to_pptx(f'Difference: {tech} capacity {lastyear} [GW]') - if interactive: - plt.show() - - -#%%### Transmission maps -wscale = 0.0003 -alpha = 0.8 -for subtract_baseyear in [None, 2020]: - plt.close() - f,ax = plt.subplots( - nrows, ncols, figsize=(13.33, 6.88), - gridspec_kw={'wspace':0.0,'hspace':-0.1}, - ) - for case in cases: - ### Plot it - reedsplots.plot_trans_onecase( - case=cases[case], pcalabel=False, wscale=wscale, - yearlabel=False, year=lastyear, simpletypes=None, - alpha=alpha, scalesize=8, - f=f, ax=ax[coords[case]], title=False, - subtract_baseyear=subtract_baseyear, - thickborders='transreg', drawstates=False, drawzones=False, - label_line_capacity=10, - scale=(True if case == basecase else False), - ) - ax[coords[case]].set_title(case) - ### Formatting - title = ( - f'New interzonal transmission since {subtract_baseyear}' if subtract_baseyear - else 'All interzonal transmission') - for row in range(nrows): - for col in range(ncols): - if nrows == 1: - ax[col].axis('off') - elif ncols == 1: - ax[row].axis('off') - else: - ax[row,col].axis('off') - ### Save it - slide = add_to_pptx(title) - if interactive: - plt.show() - -#%% Transmission difference -plt.close() -f,ax = plt.subplots( - nrows, ncols, figsize=(13.33, 6.88), - gridspec_kw={'wspace':0.0,'hspace':-0.1}, -) -for case in cases: - ax[coords[case]].set_title(case) - if case == basecase: - ### Plot absolute - reedsplots.plot_trans_onecase( - case=cases[case], pcalabel=False, wscale=wscale, - yearlabel=False, year=lastyear, simpletypes=None, - alpha=alpha, scalesize=8, - f=f, ax=ax[coords[case]], title=False, - subtract_baseyear=subtract_baseyear, - thickborders='transreg', drawstates=False, drawzones=False, - label_line_capacity=10, - 
scale=(True if case == basecase else False), - ) - else: - ### Plot the difference - reedsplots.plot_trans_diff( - casebase=cases[basecase], casecomp=cases[case], - pcalabel=False, wscale=wscale, - yearlabel=False, year=lastyear, simpletypes=None, - alpha=alpha, - f=f, ax=ax[coords[case]], - subtract_baseyear=subtract_baseyear, - thickborders='transreg', drawstates=False, drawzones=False, - label_line_capacity=10, - scale=False, - ) -### Formatting -title = 'Interzonal transmission difference' -for row in range(nrows): - for col in range(ncols): - if nrows == 1: - ax[col].axis('off') - elif ncols == 1: - ax[row].axis('off') - else: - ax[row,col].axis('off') -### Save it -slide = add_to_pptx(title) -if interactive: - plt.show() - - -#%%### Interregional transfer capability to peak demand ratio -try: - f, ax, dfplot = reedsplots.plot_interreg_transfer_cap_ratio( - case=list(cases.values()), - colors={v: colors[k] for k,v in cases.items()}, - casenames={v:k for k,v in cases.items()}, - level=level, tstart=startyear, - ymax=None, - ) - ### Save it - slide = add_to_pptx('Interregional transmission / peak demand') - if interactive: - plt.show() - -except Exception: - print(traceback.format_exc()) - - -#%% Save the powerpoint file -prs.save(savename) -print(f'\ncompare_casegroup.py results saved to:\n{savename}') - -### Open it -if sys.platform == 'darwin': - sp.run(f"open '{savename}'", shell=True) -elif platform.system() == 'Windows': - sp.run(f'"{savename}"', shell=True) diff --git a/postprocessing/compare_cases.py b/postprocessing/compare_cases.py index 06b98c6..4233a8f 100644 --- a/postprocessing/compare_cases.py +++ b/postprocessing/compare_cases.py @@ -1,35 +1,155 @@ #%% Imports +import numpy as np import pandas as pd +import matplotlib as mpl import matplotlib.pyplot as plt +from matplotlib import patheffects as pe import os -import io import sys +import io +import argparse import site -import platform import subprocess as sp -import argparse -import geopandas as gpd +import platform +from glob import glob +from tqdm import tqdm +import traceback +import cmocean import pptx -from pptx.util import Inches -os.environ['PROJ_NETWORK'] = 'OFF' +from pptx.util import Inches, Pt reeds_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -remotepath = '/Volumes/ReEDS/' if sys.platform == 'darwin' else r'//nrelnas01/ReEDS/' ### Format plots and load other convenience functions site.addsitedir(os.path.join(reeds_path,'postprocessing')) import plots import reedsplots +from bokehpivot.defaults import DEFAULT_DOLLAR_YEAR, DEFAULT_PV_YEAR, DEFAULT_DISCOUNT_RATE plots.plotparams() +#%% Argument inputs +parser = argparse.ArgumentParser( + description='Compare multiple ReEDS cases', + formatter_class=argparse.ArgumentDefaultsHelpFormatter, +) +parser.add_argument( + 'caselist', type=str, nargs='+', + help=('space-delimited list of cases to plot, OR shared casename prefix, ' + 'OR csv file of cases. 
The first case is treated as the base case ' + 'unless a different one is provided via the --basecase/-b argument.')) +parser.add_argument( + '--casenames', '-n', type=str, default='', + help='comma-delimited list of shorter case names to use in plots') +parser.add_argument( + '--titleshorten', '-s', type=str, default='', + help='characters to cut from start of case name (only used if no casenames)') +parser.add_argument( + '--startyear', '-t', type=int, default=2020, + help='First year to show') +parser.add_argument( + '--sharey', '-y', action='store_true', + help='Use same y-axis scale for absolute and difference plots') +parser.add_argument( + '--basecase', '-b', type=str, default='', + help='Substring of case path to use as default (if empty, uses first case in list)') +parser.add_argument( + '--skipbp', '-p', action='store_true', + help='flag to prevent bokehpivot report from being generated') +parser.add_argument( + '--bpreport', '-r', type=str, default='standard_report_reduced', + help='which bokehpivot report to generate') +parser.add_argument( + '--detailed', '-d', action='store_true', + help='Include more detailed plots') + +args = parser.parse_args() +_caselist = args.caselist +_casenames = args.casenames +try: + titleshorten = int(args.titleshorten) +except ValueError: + titleshorten = len(args.titleshorten) +_basecase = args.basecase +startyear = args.startyear +sharey = True if args.sharey else 'row' +bpreport = args.bpreport +skipbp = args.skipbp +detailed = args.detailed +interactive = False + +#%% Inputs for testing +# reeds_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +# _caselist = [os.path.join(reeds_path,'postprocessing','example.csv')] +# _casenames = '' +# titleshorten = 0 +# startyear = 2020 +# sharey = 'row' +# _basecase = '' +# skipbp = True +# bpreport = 'standard_report_reduced' +# interactive = True +# detailed = False + + +#%%### Fixed inputs +cmap = cmocean.cm.rain +cmap_diff = plt.cm.RdBu_r +## https://www.whitehouse.gov/wp-content/uploads/2023/11/CircularA-4.pdf +discountrate_social = DEFAULT_DISCOUNT_RATE +## https://www.epa.gov/environmental-economics/scghg +discountrate_scghg = 0.02 +assert discountrate_scghg in [0.015, 0.02, 0.025] +central_health = {'cr':'ACS', 'model':'EASIUR'} +reeds_dollaryear = 2004 +output_dollaryear = DEFAULT_DOLLAR_YEAR +startyear_notes = DEFAULT_PV_YEAR + +colors_social = { + 'CO2': plt.cm.tab20b(4), + 'CH4': plt.cm.tab20b(5), + 'health': plt.cm.tab20b(7), +} + +techmap = { + **{f'upv_{i}':'Utility PV' for i in range(20)}, + **{f'dupv_{i}':'Utility PV' for i in range(20)}, + **{f'wind-ons_{i}':'Land-based wind' for i in range(20)}, + **{f'wind-ofs_{i}':'Offshore wind' for i in range(20)}, + **dict(zip(['nuclear','nuclear-smr'], ['Nuclear']*20)), + **dict(zip( + ['gas-cc_re-cc','gas-ct_re-ct','re-cc','re-ct', + 'gas-cc_h2-ct','gas-ct_h2-ct','h2-cc','h2-ct',], + ['H2 turbine']*20)), + **{f'battery_{i}':'Battery/PSH' for i in range(20)}, **{'pumped-hydro':'Battery/PSH'}, + **dict(zip( + ['coal-igcc', 'coaloldscr', 'coalolduns', 'gas-cc', 'gas-ct', 'coal-new', 'o-g-s',], + ['Fossil']*20)), + **dict(zip( + ['gas-cc_gas-cc-ccs_mod','gas-cc_gas-cc-ccs_max','gas-cc-ccs_mod','gas-cc-ccs_max', + 'gas-cc_gas-cc-ccs_mod','coal-igcc_coal-ccs_mod','coal-new_coal-ccs_mod', + 'coaloldscr_coal-ccs_mod','coalolduns_coal-ccs_mod','cofirenew_coal-ccs_mod', + 'cofireold_coal-ccs_mod','gas-cc_gas-cc-ccs_max','coal-igcc_coal-ccs_max', + 'coal-new_coal-ccs_max','coaloldscr_coal-ccs_max','coalolduns_coal-ccs_max', + 
'cofirenew_coal-ccs_max','cofireold_coal-ccs_max',], + ['Fossil+CCS']*50)), + **dict(zip(['dac','beccs_mod','beccs_max'],['CO2 removal']*20)), +} + +maptechs = [ + 'Utility PV', + 'Land-based wind', + 'Offshore wind', + 'Nuclear', + 'H2 turbine', + 'Battery/PSH', + 'Fossil+CCS', +] -#%% User inputs -plotvals = [ +plotdiffvals = [ 'Generation (TWh)', 'Capacity (GW)', 'New Annual Capacity (GW)', 'Annual Retirements (GW)', - 'Final Gen by timeslice (GW)', 'Firm Capacity (GW)', 'Curtailment Rate', 'Transmission (GW-mi)', @@ -42,61 +162,240 @@ 'Runtime by year (hours)', ] onlytechs = None -yearmin = 2020 -yearmax = 2050 i_plots = ['wind-ons','upv','battery','h2-ct','nuclear','gas-cc-ccs','coal-ccs',] ## mapdiff: 'cap' or 'gen_ann' mapdiff = 'cap' -interactive = False -#%% Argument inputs -parser = argparse.ArgumentParser(description='run ReEDS2PRAS') -parser.add_argument('casebase', type=str, - help='path to ReEDS run folder for base case') -parser.add_argument('casecomp', type=str, - help='path to ReEDS run folder for comparison case') -parser.add_argument('--year', '-y', type=int, default=0, - help='year to run') -parser.add_argument('--titleshorten', '-s', type=str, default='', - help='characters to cut from start of case name (int or str)') -parser.add_argument('--skipbp', '-p', action='store_true', - help='flag to prevent bokehpivot report from being generated') -parser.add_argument('--bpreport', '-b', type=str, default='standard_report_reduced', - help='which bokehpivot report to generate') -args = parser.parse_args() -casebase = args.casebase -casecomp = args.casecomp -year = args.year -bpreport = args.bpreport -skipbp = args.skipbp -try: - titleshorten = int(args.titleshorten) -except ValueError: - titleshorten = len(args.titleshorten) +#%%### Functions +def add_to_pptx( + title=None, file=None, left=0, top=0.62, width=13.33, height=None, + verbose=1, slide=None, + ): + """Add current matplotlib figure (or file if specified) to new powerpoint slide""" + if not file: + image = io.BytesIO() + plt.savefig(image, format='png') + else: + image = file + if not os.path.exists(image): + raise FileNotFoundError(image) + + if slide is None: + slide = prs.slides.add_slide(blank_slide_layout) + slide.shapes.title.text = title + slide.shapes.add_picture( + image, + left=(None if left is None else Inches(left)), + top=(None if top is None else Inches(top)), + width=(None if width is None else Inches(width)), + height=(None if height is None else Inches(height)), + ) + if verbose: + print(title) + return slide + + +def add_textbox( + text, slide, + left=0, top=7.2, width=13.33, height=0.3, + fontsize=14, + ): + """Add a textbox to the specified slide""" + textbox = slide.shapes.add_textbox( + left=(None if left is None else Inches(left)), + top=(None if top is None else Inches(top)), + width=(None if width is None else Inches(width)), + height=(None if height is None else Inches(height)), + ) + p = textbox.text_frame.paragraphs[0] + run = p.add_run() + run.text = text + font = run.font + font.size = Pt(fontsize) + return slide + + +def plot_bars_abs_stacked( + dfplot, basecase, colors, ax, col=0, + net=True, label=True, ypad=0.02, fontsize=9, + ): + """ + * ax must have at least 2 rows + * dfplot must have cases as rows and stacked bar elements (matching colors) as cols + """ + ## Absolute and difference + if isinstance(basecase, str): + dfdiff = dfplot - dfplot.loc[basecase] + elif isinstance(basecase, list): + dfdiff = dfplot - dfplot.loc[basecase].values + elif isinstance(basecase, dict): + dfdiff 
= dfplot - dfplot.loc[basecase.values()].values + + for (row, df) in enumerate([dfplot, dfdiff]): + plots.stackbar(df=df, ax=ax[row,col], colors=colors, net=(net or row), width=0.8) + ymin, ymax = ax[row,col].get_ylim() + _ypad = (ymax - ymin) * ypad + ## label net value + if label: + for x, case in enumerate(df.index): + val = df.loc[case].sum() + if np.around(val, 0) == 0: + continue + ax[row,col].annotate( + f'{val:.0f}', (x, val - _ypad), ha='center', va='top', + color='k', size=fontsize, + path_effects=[pe.withStroke(linewidth=2.0, foreground='w', alpha=0.7)], + ) + ## Legend info + legend_handles = [ + mpl.patches.Patch(facecolor=colors[i], edgecolor='none', label=i) + for i in (colors if isinstance(colors, dict) else colors.index) if i in dfplot + ] + return legend_handles -# #%% Inputs for testing -# casebase = os.path.join(reeds_path,'runs','v20230509_onelineM0_NEIAIL_No') -# casecomp = os.path.join(reeds_path,'runs','v20230509_onelineM0_NEIAIL_No_p39p80') -# casebase = ( -# '/Volumes/ReEDS/FY22-NTP/Candidates/Archive/ReEDSruns/' -# '20230418/v20230418_prasH0_Xlim_DemHi_90by2035EP__core' -# ) -# casecomp = ( -# '/Volumes/ReEDS/FY22-NTP/Candidates/Archive/ReEDSruns/' -# '20230418/v20230418_prasH0_AC_DemHi_90by2035EP__core' -# ) -# year = 0 -# interactive = True #%%### Procedure -casebase_name = os.path.basename(casebase) -casecomp_name = os.path.basename(casecomp) -#%% Create output folder if it doesn't exist -outpath = f'{casecomp}/outputs/comparisons' +#%% Parse arguments +use_table_casenames = False +use_table_colors = False +use_table_bases = False +if len(_caselist) == 1: + ## If it's a .csv, read the cases to compare + if _caselist[0].endswith('.csv'): + dfcase = pd.read_csv(_caselist[0], header=None, comment='#', quoting=3) + ## First check it's a simple csv with one case per row + if dfcase.shape[1] == 1: + caselist = dfcase[0].tolist() + ## Then check if it's a csv with [casepath,casename] in the header + elif ( + ('casepath' in dfcase.loc[0].tolist()) + and ('casename' in dfcase.loc[0].tolist()) + ): + dfcase = dfcase.T.set_index(0).T + ## Drop cases that haven't finished yet + unfinished = dfcase.loc[ + ~dfcase.casepath.map( + lambda x: os.path.isfile(os.path.join(x,'outputs','reeds-report','report.xlsx'))) + ].index + if len(unfinished): + print('The following cases have not yet finished:') + print('\n'.join(dfcase.loc[unfinished].casepath.tolist())) + dfcase = dfcase.drop(unfinished).copy() + caselist = dfcase.casepath.tolist() + use_table_casenames = True + if 'color' in dfcase: + if not dfcase.color.isnull().any(): + use_table_colors = True + if 'base' in dfcase: + if not dfcase.base.isnull().any(): + use_table_bases = True + ## Otherwise assume it's a copy of a cases_{batchname}.csv file in a case folder + ## This approach is less robust; the others are preferred. 
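# The two preferred CSV layouts checked above are, roughly: (a) a headerless CSV
# with one case path per row, or (b) a CSV whose header row contains at least
# 'casepath' and 'casename', with optional 'color' and 'base' columns that are
# picked up further below for per-case plot colors and per-case comparison bases.
# A hypothetical example of layout (b) (paths and names made up for illustration):
#
#     casepath,casename,color,base
#     /projects/reeds/runs/v20240101_Main,Main,C0,Main
#     /projects/reeds/runs/v20240101_HighDemand,High demand,C1,Main
#
# Color entries starting with 'plt.cm.' or 'cmocean.cm.' appear to be eval'd below,
# so colormap calls such as plt.cm.tab20(0) are also accepted.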
+ else: + prefix_plus_tail = os.path.dirname(_caselist[0]) + tails = [i for i in dfcase.iloc[0] if i not in ['Default Value',np.nan]] + prefix = prefix_plus_tail[:-len([i for i in tails if prefix_plus_tail.endswith(i)][0])] + caselist = [prefix+i for i in tails] + ## Otherwise look for all runs starting with the provided string + else: + caselist = sorted(glob(_caselist[0]+'*')) + ## If no titleshorten is provided, use the provided prefix + if not titleshorten: + titleshorten = len(os.path.basename(_caselist)) +else: + caselist = _caselist + +## Remove cases that haven't finished yet +caselist = [ + i for i in caselist + if os.path.isfile(os.path.join(i,'outputs','reeds-report','report.xlsx')) +] + +## Get the casenames +if use_table_casenames: + casenames = [c.replace('\\n','\n') for c in dfcase.casename.tolist()] +else: + casenames = ( + _casenames.split(',') if len(_casenames) + else [os.path.basename(c)[titleshorten:] for c in caselist] + ) + +if len(caselist) != len(casenames): + err = ( + f"len(caselist) = {len(caselist)} but len(casenames) = {len(casenames)}\n\n" + 'caselist:\n' + '\n'.join(caselist) + '\n\n' + 'casenames:\n' + '\n'.join(casenames) + '\n' + ) + raise ValueError(err) + +cases = dict(zip(casenames, caselist)) +maxlength = max([len(c) for c in cases]) + +### Get the base cases +if not len(_basecase): + basecase = list(cases.keys())[0] +else: + basepath = [c for c in cases.values() if c.endswith(_basecase)] + if len(basepath) == 0: + err = ( + f"Use a basecase that matches one case.\nbasecase={_basecase} matches none of:\n" + + '\n'.join(basepath) + ) + raise ValueError(err) + elif len(basepath) > 1: + err = ( + f"Use a basecase that only matches one case.\nbasecase={_basecase} matches:\n" + + '\n'.join(basepath) + ) + raise ValueError(err) + else: + basepath = basepath[0] + ## basecase is the short name; basepath is the full path + basecase = casenames[caselist.index(basepath)] + ## Put it first in the list + cases = {**{basecase:cases[basecase]}, **{k:v for k,v in cases.items() if k != basecase}} + +## Make case->base dictionary +if use_table_bases: + basemap = dfcase.set_index('casename').base.to_dict() +else: + basemap = dict(zip(cases, [basecase]*len(cases))) + +## Get the colors +if use_table_colors: + colors = dict(zip(dfcase.casename, dfcase.color)) + for k, v in colors.items(): + if v.startswith('plt.cm.') or v.startswith('cmocean.cm.'): + colors[k] = eval(v) +else: + colors = plots.rainbowmapper(cases) + +## Arrange the maps +nrows, ncols, coords = plots.get_coordinates(cases, aspect=2) + +## Take a look +print('Analyzing the following cases:') +for case, path in cases.items(): + print( + f'{path} -> {case}' + + (' (base)' if ((not use_table_bases) and (case == basecase)) else '') + ) + +#%% Create output folder +firstcasepath = list(cases.values())[0] +outpath = os.path.join(firstcasepath, 'outputs', 'comparisons') os.makedirs(outpath, exist_ok=True) -print(f'Saving results to {outpath}') +## Remove disallowed characters and clip filename to max length +max_filename_length = 250 +savename = os.path.join( + outpath, + (f"results-{','.join(cases.keys())}" + .replace(':','').replace('/','').replace(' ','').replace('\\n','').replace('\n','') + [:max_filename_length-len('.pptx')]) + '.pptx' +) +print(f'Saving results to {savename}') #%% Create bokehpivot report as subprocess if not skipbp: @@ -104,171 +403,2230 @@ bp_path = f'{reeds_path}/postprocessing/bokehpivot' bp_py_file = f'{bp_path}/reports/interface_report_model.py' report_path = 
f'{bp_path}/reports/templates/reeds2/{bpreport}.py' - bp_outpath = f'{outpath}/{bpreport}-diff-with-{casebase_name}' + bp_outpath = f'{outpath}/{bpreport}-diff-multicase' add_diff = 'Yes' auto_open = 'Yes' + bp_colors = pd.read_csv(f'{bp_path}/in/example_reeds_scenarios.csv')['color'].tolist() + bp_colors = bp_colors*10 #Up to 200 scenarios + bp_colors = bp_colors[:len(casenames)] + df_scenarios = pd.DataFrame({'name':casenames, 'color':bp_colors, 'path':caselist}) + scenarios_path = f'{outpath}/scenarios.csv' + df_scenarios.to_csv(scenarios_path, index=False) call_str = ( - f'{start_str}python "{bp_py_file}" "ReEDS 2.0" "{casebase}|{casecomp}" all ' + - f'{add_diff} "{casebase_name}" "{report_path}" "html,excel" one "{bp_outpath}" {auto_open}' + f'{start_str}python "{bp_py_file}" "ReEDS 2.0" "{scenarios_path}" all ' + + f'{add_diff} "{basecase}" "{report_path}" "html,excel" one "{bp_outpath}" {auto_open}' ) sp.Popen(call_str, shell=True) -#%% Set up powerpoint file and default figure-adding approach -prs = pptx.Presentation(os.path.join(reeds_path,'postprocessing','template.pptx')) -blank_slide_layout = prs.slide_layouts[3] - -def add_to_pptx(title, left=0, top=0.62, width=13.33, height=None): - ## Add current matplotlib figure to new powerpoint slide - image = io.BytesIO() - plt.savefig(image, format='png') - slide = prs.slides.add_slide(blank_slide_layout) - slide.shapes.title.text = title - slide.shapes.add_picture( - image, - left=(None if left is None else Inches(left)), - top=(None if top is None else Inches(top)), - width=(None if width is None else Inches(width)), - height=(None if height is None else Inches(height)), - ) - return slide +#%%### Load data +#%% Shared +## Determine if we're on a branch before or after county-level capability was merged +countyreeds = ( + True if os.path.isfile(os.path.join(reeds_path,'inputs','transmission','r_rr_adj_county.csv')) + else False +) +if countyreeds: + hierarchy = pd.read_csv( + os.path.join(reeds_path,'inputs','hierarchy.csv') + ).drop(['county','county_name'], axis=1).drop_duplicates().rename(columns={'ba':'r'}).set_index('r') +else: + hierarchy = pd.read_csv( + os.path.join(reeds_path,'inputs','hierarchy.csv') + ).rename(columns={'*r':'r'}).set_index('r') +hierarchy = hierarchy.loc[hierarchy.country.str.lower()=='usa'].copy() -#%% Get the switches, overwriting values as necessary sw = pd.read_csv( - os.path.join(casebase, 'inputs_case', 'switches.csv'), + os.path.join(cases[case],'inputs_case','switches.csv'), header=None, index_col=0).squeeze(1) -sw['reeds_path'] = reeds_path -### Get the solve years -years = pd.read_csv( - os.path.join(casebase, 'inputs_case', 'modeledyears.csv') -).columns.astype(int).tolist() +scalars = pd.read_csv( + os.path.join(cases[case], 'inputs_case', 'scalars.csv'), + header=None, usecols=[0,1], index_col=0).squeeze(1) +phaseout_trigger = float(scalars.co2_emissions_2022) * float(sw.GSw_TCPhaseout_trigger_f) + +inflatable = reedsplots.get_inflatable(os.path.join( + reeds_path,'inputs','financials','inflation_default.csv')) +inflator = inflatable[reeds_dollaryear, output_dollaryear] + +scghg = pd.read_csv( + os.path.join(reeds_path, 'postprocessing', 'plots', 'scghg_annual.csv'), + comment='#', thousands=',' +).rename(columns={ + 'gas':'e', + 'emission.year':'t', + '2.5% Ramsey':'2020_2.5%', + '2.0% Ramsey':'2020_2.0%', + '1.5% Ramsey':'2020_1.5%', +}).set_index(['e','t']) +scghg_central = ( + scghg[f'2020_{discountrate_scghg*100:.1f}%'].unstack('e') + * inflatable[2020, output_dollaryear] +) + +#%% Colors 
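# The style CSVs read in this cell are indexed by their 'order' column, whose row
# order appears to set both the colors and the stacking order used throughout the
# plots below. Separately, scghg_central (built in the cell above) is a year-by-gas
# table of social costs in $/tonne, taken at the 2.0% Ramsey rate and inflated from
# 2020$ to the output dollar year; the CO2 and CH4 columns are applied later in this
# script roughly as
#     annual climate damages [$B/yr] = emissions [tonne/yr] * scghg_central [$/tonne] / 1e9
#     NPV [$B] = sum over years of (annual damages * discounts)
# where `discounts` is the 1/(1+discountrate_social)**(t - startyear_notes) factor
# series defined further below.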
+bokehcostcolors = pd.read_csv( + os.path.join( + reeds_path,'postprocessing','bokehpivot','in','reeds2','cost_cat_style.csv'), + index_col='order').squeeze(1) +bokehcostcolors = bokehcostcolors.loc[~bokehcostcolors.index.duplicated()] + +colors_time = pd.read_csv( + os.path.join( + reeds_path,'postprocessing','bokehpivot','in','reeds2','process_style.csv'), + index_col='order', +).squeeze(1) + +bokehcolors = pd.read_csv( + os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_style.csv'), + index_col='order').squeeze(1) + +tech_map = pd.read_csv( + os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_map.csv'), + index_col='raw').squeeze(1) + +bokehcolors = pd.concat([ + bokehcolors.loc['smr':'electrolyzer'], + pd.Series('#D55E00', index=['dac'], name='color'), + bokehcolors.loc[:'Canada'], +]) + +bokehcolors['canada'] = bokehcolors['Canada'] + +techcolors = { + 'gas-cc_gas-cc-ccs':bokehcolors['gas-cc-ccs_mod'], + 'cofire':bokehcolors['biopower'], + 'gas-cc':'#5E1688', + 'gas-cc-ccs':'#9467BD', +} +for i in bokehcolors.index: + if i in techcolors: + pass + elif i in bokehcolors.index: + techcolors[i] = bokehcolors[i] + else: + raise Exception(i) + +techcolors = {i: techcolors[i] for i in bokehcolors.index} -### Parse the year input -t = year if (year in years) else years[-1] -sw['t'] = t +trtype_map = pd.read_csv( + os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','trtype_map.csv'), + index_col='raw')['display'] +colors_trans = pd.read_csv( + os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','trtype_style.csv'), + index_col='order')['color'] -#%%### Make the annual difference bar plots -print('base:', casebase) -print('comp:', casecomp) -for val in plotvals: +#%% Parse excel report sheet names +val2sheet = reedsplots.get_report_sheetmap(cases[basecase]) + +#%% Read input files +renametechs = { + 'h2-cc_upgrade':'h2-cc', + 'h2-ct_upgrade':'h2-ct', + 'gas-cc-ccs_mod_upgrade':'gas-cc-ccs_mod', + 'coal-ccs_mod_upgrade':'coal-ccs_mod', +} +dictin_sw = { + case: pd.read_csv( + os.path.join(cases[case],'inputs_case','switches.csv'), + header=None, index_col=0).squeeze(1) + for case in cases +} + +dictin_cap = {} +for case in tqdm(cases, desc='national capacity'): + dictin_cap[case] = pd.read_excel( + os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), + sheet_name=val2sheet['Capacity (GW)'], + ).drop('scenario',axis=1) + ### Simplify techs + dictin_cap[case].tech = dictin_cap[case].tech.map(lambda x: renametechs.get(x,x)) + dictin_cap[case] = ( + dictin_cap[case].groupby(['tech','year'], as_index=False) + ['Capacity (GW)'].sum()) + dictin_cap[case] = dictin_cap[case].loc[ + ~dictin_cap[case].tech.isin(['electrolyzer','smr','smr-ccs'])].copy() + +dictin_gen = {} +for case in tqdm(cases, desc='national generation'): + dictin_gen[case] = pd.read_excel( + os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), + sheet_name=val2sheet['Generation (TWh)'], + ).drop('scenario',axis=1) + ### Simplify techs + dictin_gen[case].tech = dictin_gen[case].tech.map(lambda x: renametechs.get(x,x)) + dictin_gen[case] = ( + dictin_gen[case].groupby(['tech','year'], as_index=False) + ['Generation (TWh)'].sum()) + +costcat_rename = { + 'CO2 Spurline':'CO2 T&S Capex', + 'CO2 Pipeline':'CO2 T&S Capex', + 'CO2 Storage':'CO2 T&S Capex', + 'CO2 Spurline FOM':'CO2 T&S O&M', + 'CO2 Pipeline FOM':'CO2 T&S O&M', + 'CO2 Incentive Payments':'CCS Incentives', + 'Capital': 'Gen & Stor Capex', + 'O&M': 'Gen & Stor O&M', + 'CO2 
Network':'CO2 T&S Capex', + 'CO2 Incentives':'CCS Incentives', + 'CO2 FOM':'CO2 T&S O&M', + 'CO2 Capture':'CO2 T&S Capex', + 'H2 Fuel':'Fuel', + 'H2 VOM':'H2 Prod O&M', +} +dictin_npv = {} +for case in tqdm(cases, desc='NPV of system cost'): + dictin_npv[case] = pd.read_excel( + os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), + sheet_name=val2sheet['Present Value of System Cost'], engine='openpyxl', + ).drop('scenario',axis=1).set_index('cost_cat')['Discounted Cost (Bil $)'] + dictin_npv[case].index = pd.Series(dictin_npv[case].index).replace(costcat_rename) + dictin_npv[case] = dictin_npv[case].groupby(level=0, sort=False).sum() + +dictin_scoe = {} +for case in tqdm(cases, desc='SCOE'): + dictin_scoe[case] = pd.read_excel( + os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), + sheet_name=val2sheet['National Average Electricity'], engine='openpyxl', + ).drop('scenario',axis=1) + dictin_scoe[case].cost_cat = dictin_scoe[case].cost_cat.replace( + {**costcat_rename,**{'CO2 Incentives':'CCS Incentives'}}) + dictin_scoe[case] = ( + dictin_scoe[case].groupby(['cost_cat','year'], sort=False, as_index=False) + ['Average cost ($/MWh)'].sum()) + +dictin_syscost = {} +for case in tqdm(cases, desc='annual system cost'): + dictin_syscost[case] = pd.read_excel( + os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), + sheet_name=val2sheet['Undiscounted Annualized Syst'], engine='openpyxl', + ).drop('scenario',axis=1) + dictin_syscost[case].cost_cat = dictin_syscost[case].cost_cat.replace( + {**costcat_rename,**{'CO2 Incentives':'CCS Incentives'}}) + dictin_syscost[case] = ( + dictin_syscost[case].groupby(['cost_cat','year'], sort=False) + ['Cost (Bil $)'].sum().unstack('cost_cat')) + +dictin_emissions = {} +for case in tqdm(cases, desc='national emissions'): + dictin_emissions[case] = pd.read_csv( + os.path.join(cases[case], 'outputs', 'emit_nat.csv'), + header=0, names=['e','t','tonne'], index_col=['e','t'], + ).squeeze(1).unstack('e') + +dictin_trans = {} +for case in tqdm(cases, desc='national transmission'): + dictin_trans[case] = pd.read_excel( + os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), + sheet_name=val2sheet['Transmission (GW-mi)'], engine='openpyxl', + ).drop('scenario',axis=1) + +dictin_trans_r = {} +for case in tqdm(cases, desc='regional transmission'): + dictin_trans_r[case] = pd.read_csv( + os.path.join(cases[case],'outputs','tran_out.csv') + ).rename(columns={'Value':'MW'}) + for _level in ['interconnect','transreg','transgrp','st']: + dictin_trans_r[case][f'inter_{_level}'] = ( + dictin_trans_r[case].r.map(hierarchy[_level]) + != dictin_trans_r[case].rr.map(hierarchy[_level]) + ).astype(int) + +dictin_cap_r = {} +for case in tqdm(cases, desc='regional capacity'): + dictin_cap_r[case] = pd.read_csv( + os.path.join(cases[case],'outputs','cap.csv'), + names=['i','r','t','MW'], header=0, + ) + ### Simplify techs + dictin_cap_r[case].i = dictin_cap_r[case].i.map(lambda x: renametechs.get(x,x)) + dictin_cap_r[case].i = dictin_cap_r[case].i.str.lower().map(lambda x: techmap.get(x,x)) + dictin_cap_r[case] = dictin_cap_r[case].groupby(['i','r','t'], as_index=False).MW.sum() + +dictin_cap_firm = {} +for case in tqdm(cases, desc='firm capacity'): + dictin_cap_firm[case] = pd.read_csv( + os.path.join(cases[case],'outputs','cap_firm.csv'), + names=['i','r','ccseason','t','MW'], header=0, + ) + ### Simplify techs + dictin_cap_firm[case].i = reedsplots.simplify_techs(dictin_cap_firm[case].i) + dictin_cap_firm[case] = 
dictin_cap_firm[case].groupby(['i','r','ccseason','t'], as_index=False).MW.sum() + +dictin_runtime = {} +for case in tqdm(cases, desc='runtime'): + dictin_runtime[case] = pd.read_excel( + os.path.join(cases[case],'outputs','reeds-report','report.xlsx'), + sheet_name=val2sheet['Runtime by year (hours)'], engine='openpyxl', + )[['process','year','processtime']] + +dictin_neue = {} +for case in tqdm(cases, desc='NEUE'): + infiles = sorted(glob(os.path.join(cases[case],'outputs','neue_*.csv'))) + if not len(infiles): + continue + df = {} + for f in infiles: + y, i = [int(s) for s in os.path.basename(f).strip('neue_.csv').split('i')] + df[y,i] = pd.read_csv(f) + df[y,i] = df[y,i].loc[ + (df[y,i].level=='country') & (df[y,i].metric=='sum'), + 'NEUE_ppm' + ].iloc[0] + df = ( + pd.Series(df, name='NEUE [ppm]').rename_axis(['t','iteration']) + .sort_index().reset_index() + .drop_duplicates(subset='t', keep='last') + .set_index('t')['NEUE [ppm]'] + ) + dictin_neue[case] = df + +dictin_health = {} +dictin_health_central = {} +dictin_health_central_mort = {} +for case in tqdm(cases, desc='health'): try: + dictin_health[case] = pd.read_csv( + os.path.join(cases[case], 'outputs', 'health_damages_caused_r.csv'), + header=0, + ).groupby(['year','pollutant','model','cr']).sum() + except FileNotFoundError: + print(case) + +dictin_health_central = { + case: ( + dictin_health[case] + .xs(central_health['cr'], level='cr') + .xs(central_health['model'], level='model') + .groupby('year').sum() + ['damage_$'] + ### Inflate from reeds_dollaryear (2004) to bokeh output_dollaryear (2021) + * inflator + ### Convert to $B + / 1e9 + ) + for case in dictin_health +} +dictin_health_central_mort = { + case: ( + dictin_health[case] + .xs(central_health['cr'], level='cr') + .xs(central_health['model'], level='model') + .groupby('year').sum() + ['mortality'] + ) + for case in dictin_health +} + + +#%% Detailed inputs +if detailed: + ### Timeslice generation by region + dictin_gen_h = {} + for case in tqdm(cases, desc='gen_h'): + dictin_gen_h[case] = pd.read_csv( + os.path.join(cases[case],'outputs','gen_h.csv'), + ).rename(columns={'Value':'GW','allh':'h'}) + dictin_gen_h[case].GW /= 1e3 + dictin_gen_h[case].i = reedsplots.simplify_techs(dictin_gen_h[case].i) + dictin_gen_h[case] = dictin_gen_h[case].groupby(['i','r','h','t'], as_index=False).GW.sum() + ## Separate charge and discharge + dictin_gen_h[case].loc[ + (dictin_gen_h[case].i.str.startswith('battery') + | dictin_gen_h[case].i.str.startswith('pumped-hydro')) + & (dictin_gen_h[case].GW < 0), + 'i' + ] += '|charge' + dictin_gen_h[case].loc[ + (dictin_gen_h[case].i.str.startswith('battery') + | dictin_gen_h[case].i.str.startswith('pumped-hydro')) + & (~dictin_gen_h[case].i.str.endswith('|charge')), + 'i' + ] += '|discharge' + + ### Aggregated generation by region + dictin_gen_h_twh = {} + for case in tqdm(dictin_gen_h): + numhours = pd.read_csv( + os.path.join(cases[case],'inputs_case','numhours.csv'), + ).rename(columns={'*h':'h'}).set_index('h').squeeze(1) + + dictin_gen_h_twh[case] = dictin_gen_h[case].copy() + dictin_gen_h_twh[case]['TWh'] = ( + dictin_gen_h_twh[case]['GW'] * dictin_gen_h_twh[case]['h'].map(numhours) + / 1e3 + ).round(3) + + dictin_gen_h_twh[case] = dictin_gen_h_twh[case].groupby(['t','i']).TWh.sum().unstack('i') + + ### Stress period dispatch + dictin_gen_h_stress = {} + for case in tqdm(cases, desc='gen_h_stress'): + dictin_gen_h_stress[case] = pd.read_csv( + os.path.join(cases[case],'outputs','gen_h_stress.csv'), + ).rename(columns={'Value':'GW', 
'allh':'h'}) + dictin_gen_h_stress[case].GW /= 1e3 + dictin_gen_h_stress[case].i = reedsplots.simplify_techs(dictin_gen_h_stress[case].i) + ## Separate charge and discharge + dictin_gen_h_stress[case].loc[dictin_gen_h_stress[case].GW < 0,'i'] += '|charge' + dictin_gen_h_stress[case].loc[dictin_gen_h_stress[case].i.isin( + ['battery_4','battery_8','pumped-hydro']),'i'] += '|discharge' + + ### Stress period flows + dictin_tran_flow_stress = {} + for case in tqdm(cases, desc='tran_flow_stress'): + dictin_tran_flow_stress[case] = pd.read_csv( + os.path.join(cases[case],'outputs','tran_flow_stress.csv'), + ).rename(columns={'Value':'GW', 'allh':'h'}) + dictin_tran_flow_stress[case].GW /= 1e3 + + ### Stress period load + dictin_load_stress = {} + for case in tqdm(cases, desc='load_stress'): + dictin_load_stress[case] = pd.read_csv( + os.path.join(cases[case],'outputs','load_stress.csv'), + ).rename(columns={'Value':'GW', 'allh':'h'}) + dictin_load_stress[case].GW /= 1e3 + + ### Peak load (for capacity credit) + distloss = 0.05 + dictin_peak_ccseason = {} + for case in tqdm(cases, desc='peak_ccseason'): + dictin_peak_ccseason[case] = pd.read_csv( + os.path.join(cases[case],'inputs_case','peak_ccseason.csv'), + ).rename(columns={'*r':'r', 'MW':'GW'}) + dictin_peak_ccseason[case].GW /= (1e3 * (1 - distloss)) + + ### Capacity credit PRMTRADE + dictin_prmtrade = {} + for case in tqdm(cases, desc='prmtrade'): + dictin_prmtrade[case] = pd.read_csv( + os.path.join(cases[case],'outputs','captrade.csv'), + header=0, names=['r','rr','trtype','ccseason','t','MW'] + ).rename(columns={'MW':'GW'}) + dictin_prmtrade[case].GW /= 1e3 + + +#%% Model years and discount rates +years = sorted(dictin_cap[case].year.astype(int).unique()) +years = [y for y in years if y >= startyear] +yearstep = years[-1] - years[-2] +lastyear = max(years) +## Years for which to add data notes +startyear_sums = 2023 +allyears = range(startyear_sums,lastyear+1) +noteyears = [2035, 2050] +if all([lastyear < y for y in noteyears]): + noteyears = [lastyear] +startyear_growth = 2035 + +discounts = pd.Series( + index=range(startyear_notes,lastyear+1), + data=[1/(1+discountrate_social)**(y-startyear_notes) + for y in range(startyear_notes,lastyear+1)] +).rename_axis('t') + + +#%%### Plots ###### +### Set up powerpoint file +prs = pptx.Presentation(os.path.join(reeds_path,'postprocessing','template.pptx')) +blank_slide_layout = prs.slide_layouts[3] + + +#%%### Generation capacity lines +aggtechsplot = { + 'Interregional\ntransmission': 'inter_transreg', + 'Land-based\nwind': ['wind-ons'], + 'Offshore\nwind': ['wind-ofs'], + # 'Wind': ['wind-ons', 'wind-ofs'], + 'Solar': ['upv', 'dupv', 'distpv', 'csp', 'pvb'], + 'Battery': ['battery_{}'.format(i) for i in [2,4,6,8,10]], + 'Pumped\nstorage\nhydro': ['pumped-hydro'], + # 'Storage': ['battery_{}'.format(i) for i in [2,4,6,8,10]] + ['pumped-hydro'], + 'Hydro, geo, bio': [ + 'hydro','geothermal', + 'biopower','lfill-gas','cofire','beccs_mod','beccs' + ], + 'Nuclear': ['nuclear', 'nuclear-smr'], + 'Hydrogen\nturbine': ['h2-cc', 'h2-cc-upgrade', 'h2-ct', 'h2-ct-upgrade'], + 'Gas CCS': ['gas-cc-ccs_mod'], + 'Coal CCS': ['coal-ccs_mod'], + # 'Fossil\n(with CCS)': ['gas-cc-ccs_mod','coal-ccs_mod'], + 'Fossil\n(w/o CCS)': ['gas-cc', 'gas-ct', 'o-g-s', 'coal', 'cofire'], +# 'CDR': ['dac', 'beccs'], +# 'H2 production': ['smr', 'smr-ccs', 'electrolyzer'], +} +checktechs = [i for sublist in aggtechsplot.values() for i in sublist] +alltechs = pd.concat(dictin_cap).tech.unique() +printstring = ( + 'The 
following techs are not plotted: ' + + ', '.join([c for c in alltechs if c not in checktechs]) +) + +offsetstart = { + 'Solar': (15,0), + 'Wind': (15,0), 'Land-based\nwind': (15,0), +} + +val = '4_Capacity (GW)' +ycol = 'Capacity (GW)' +techrows, techcols = 2, len(aggtechsplot)//2+len(aggtechsplot)%2 +techcoords = dict(zip( + list(aggtechsplot.keys()), + [(row,col) for row in range(techrows) for col in range(techcols)] +)) + +offset = dict() + +plt.close() +f,ax = plt.subplots( + techrows, techcols, sharex=True, sharey=True, + figsize=(13.33, 6.88), + gridspec_kw={'wspace':0.3, 'hspace':0.15}, +) +df = {} +for tech in aggtechsplot: + for case in cases: + ### Central cases + if 'transmission' in tech.lower(): + df[tech,case] = dictin_trans_r[case].loc[ + dictin_trans_r[case][aggtechsplot[tech]]==1 + ].groupby('t').MW.sum() / 1e3 + else: + df[tech,case] = dictin_cap[case].loc[ + dictin_cap[case].tech.isin(aggtechsplot[tech]) + ].groupby('year')[ycol].sum().reindex(years).fillna(0) + ax[techcoords[tech]].plot( + df[tech,case].index, df[tech,case].values, + label=case, color=colors[case], ls='-', + ) + ### Annotate the last value (with overlaps) + fincap = df[tech,case].reindex([lastyear]).fillna(0).squeeze() + ax[techcoords[tech]].annotate( + ' {:.0f}'.format(fincap), + (lastyear, fincap+offset.get((tech,case),0)), + ha='left', va='center', + color=colors[case], fontsize='small', + annotation_clip=False, + ) + ### Formatting + ax[techcoords[tech]].xaxis.set_minor_locator(mpl.ticker.MultipleLocator(5 if lastyear>2040 else 1)) + ax[techcoords[tech]].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10 if lastyear>2040 else 5)) + ax[techcoords[tech]].annotate( + tech, + (0.05,1.0), va='top', ha='left', + xycoords='axes fraction', + fontsize='x-large', weight='bold',) + ### Annotate the 2020 value + plots.annotate( + ax[techcoords[tech]], basecase, + startyear, offsetstart.get(tech,(10,10)), color='C7', + arrowprops={'arrowstyle':'-|>', 'color':'C7'}) +if len(aggtechsplot) % 2: + ax[-1,-1].axis('off') +## Legend +handles, labels = ax[-1,0].get_legend_handles_labels() +leg = ax[0,-1].legend( + handles, labels, + fontsize='large', frameon=False, + loc='center left', bbox_to_anchor=(1.1,-0.075), + handletextpad=0.3, handlelength=0.7, + ncol=1, +) +for legobj in leg.legend_handles: + legobj.set_linewidth(8) + legobj.set_solid_capstyle('butt') +ax[techcoords[list(aggtechsplot.keys())[0]]].set_xlim(startyear,lastyear) +ax[techcoords[list(aggtechsplot.keys())[0]]].set_ylim(0) +ax[techcoords[list(aggtechsplot.keys())[0]]].set_ylabel('Capacity [GW]', y=-0.075) +# for row in range(techrows): +# ax[row,0].set_ylabel('Capacity [GW]') +ax[techcoords[list(aggtechsplot.keys())[0]]].set_ylim(0) +# ## Annotate the last value (without overlaps) +# ymax = ax[techcoords[list(aggtechsplot.keys())[0]]].get_ylim()[1] +# df = pd.concat(df, axis=1) +# for tech in aggtechsplot: +# plots.label_last( +# df[tech], ax[techcoords[tech]], colors=colors, extend='both', +# mindistance=ymax*0.05, fontsize='small', +# ) + +plots.despine(ax) +plt.draw() +plots.shorten_years(ax[1,0]) +### Save it +slide = add_to_pptx('Capacity') +add_textbox(printstring, slide) +if interactive: + print(printstring) + plt.show() + + +#%%### Capacity and generation bars +if len(cases) == 2: + casebase, casecomp = list(cases.values()) + casebase_name, casecomp_name = list(cases.keys()) + for val in plotdiffvals: + try: + plt.close() + f, ax, leg, dfdiff, printstring = reedsplots.plotdiff( + val, casebase=casebase, casecomp=casecomp, + 
casebase_name=casebase_name, casecomp_name=casecomp_name, + onlytechs=onlytechs, titleshorten=titleshorten, + yearmin=(2025 if 'NEUE' in val else startyear), yearmax=lastyear, + # plot_kwds={'figsize':(4,4), 'gridspec_kw':{'wspace':0.7}}, + ) + slide = add_to_pptx(val, verbose=0) + textbox = slide.shapes.add_textbox( + left=Inches(0), top=Inches(7), + width=Inches(13.33), height=Inches(0.5)) + textbox.text_frame.text = printstring + if interactive: + plt.show() + except Exception as err: + print(err) +else: + toplot = { + 'Capacity': {'data': dictin_cap, 'colors':techcolors, 'columns':'tech', 'values':'Capacity (GW)', 'label':'Capacity [GW]'}, + 'Generation': {'data': dictin_gen, 'colors':techcolors, 'columns':'tech', 'values':'Generation (TWh)', 'label':'Generation [TWh]'}, + 'Runtime': {'data': dictin_runtime, 'colors':colors_time.to_dict(), 'columns':'process', 'values':'processtime', 'label':'Runtime [hours]'}, + } + plotwidth = 2.0 + figwidth = plotwidth * len(cases) + dfbase = {} + for slidetitle, data in toplot.items(): plt.close() - f, ax, leg, dfdiff, printstring = reedsplots.plotdiff( - val, casebase, casecomp, onlytechs=None, titleshorten=titleshorten, - yearmin=(2025 if 'NEUE' in val else yearmin), yearmax=yearmax, - # plot_kwds={'figsize':(4,4), 'gridspec_kw':{'wspace':0.7}}, + f,ax = plt.subplots( + 2, len(cases), figsize=(figwidth, 6.8), + sharex=True, sharey=sharey, dpi=None, + ) + ax[0,0].set_ylabel(data['label'], y=-0.075) + ax[0,0].set_xlim(2017.5, lastyear+2.5) + ax[1,0].annotate( + f'Diff\nfrom\n{basecase}', (0.03,0.03), xycoords='axes fraction', + fontsize='x-large', weight='bold') + ###### Absolute + alltechs = set() + for col, case in enumerate(cases): + if case not in data['data']: + continue + dfplot = data['data'][case].pivot(index='year', columns=data['columns'], values=data['values']) + dfplot = ( + dfplot[[c for c in data['colors'] if c in dfplot]] + .round(3).replace(0,np.nan) + .dropna(axis=1, how='all') + ) + if case == basecase: + dfbase[slidetitle] = dfplot.copy() + alltechs.update(dfplot.columns) + plots.stackbar(df=dfplot, ax=ax[0,col], colors=data['colors'], width=yearstep, net=False) + ax[0,col].set_title( + plots.wraptext(case, width=plotwidth+0.1, fontsize=14), + fontsize=14, weight='bold', x=0, ha='left', pad=8,) + ax[0,col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[0,col].xaxis.set_minor_locator(mpl.ticker.MultipleLocator(5)) + + + ### Legend + handles = [ + mpl.patches.Patch( + facecolor=data['colors'][i], edgecolor='none', + label=i.replace('Canada','imports').split('/')[-1] + ) + for i in data['colors'] if i in alltechs + ] + leg = ax[0,-1].legend( + handles=handles[::-1], loc='upper left', bbox_to_anchor=(1.0,1.0), + fontsize='medium', ncol=1, frameon=False, + handletextpad=0.3, handlelength=0.7, columnspacing=0.5, ) - slide = add_to_pptx(val) - textbox = slide.shapes.add_textbox( - left=Inches(0), top=Inches(7), - width=Inches(13.33), height=Inches(0.5)) - textbox.text_frame.text = printstring + + ###### Difference + for col, case in enumerate(cases): + ax[1,col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[1,col].xaxis.set_minor_locator(mpl.ticker.MultipleLocator(5)) + ax[1,col].axhline(0,c='k',ls='--',lw=0.75) + + if (case not in data['data']) or (case == basecase): + continue + dfplot = data['data'][case].pivot(index='year', columns=data['columns'], values=data['values']) + dfplot = ( + dfplot + .round(3).replace(0,np.nan) + .dropna(axis=1, how='all') + ) + dfplot = dfplot.subtract(dfbase[slidetitle], 
fill_value=0) + dfplot = dfplot[[c for c in data['colors'] if c in dfplot]].copy() + alltechs.update(dfplot.columns) + plots.stackbar(df=dfplot, ax=ax[1,col], colors=data['colors'], width=yearstep, net=True) + + plots.despine(ax) + plt.draw() + plots.shorten_years(ax[1,0]) + ### Save it + slide = add_to_pptx(slidetitle+' stack', width=min(figwidth, 13.33)) if interactive: plt.show() - except Exception as err: - print(err) -#%%### Transmission diff map +#%% Alternate view: Stacks with bars labeled +barwidth = 0.35 +labelpad = 0.08 +width = 1.6*len(cases) + 0.5 +aggstack = { + **{f'battery_{i}':'Storage' for i in [2,4,6,8,10]}, + **{f'battery_{i}|charge':'Storage|charge' for i in [2,4,6,8,10]}, + **{f'battery_{i}|discharge':'Storage|discharge' for i in [2,4,6,8,10]}, + **{ + 'pumped-hydro':'Storage', + 'pumped-hydro|charge':'Storage|charge', 'pumped-hydro|discharge':'Storage|discharge', + + 'h2-cc':'H2 turbine', 'h2-ct':'H2 turbine', + + 'beccs_mod':'Bio/BECCS', + 'biopower':'Bio/BECCS', 'lfill-gas':'Bio/BECCS', 'cofire':'Bio/BECCS', + + 'gas-cc-ccs_mod':'Gas+CCS', + 'coal-ccs_mod':'Coal+CCS', + 'gas-cc':'Gas', 'gas-ct':'Gas', 'o-g-s':'Gas', + 'coal':'Coal', + + 'Canada':'Canadian imports', 'canada':'Canadian imports', + + 'hydro':'Hydro', + 'geothermal':'Geothermal', + + 'csp':'CSP', + 'upv':'PV', 'dupv':'PV', 'distpv':'PV', + 'pvb':'PVB', + + 'wind-ofs':'Offshore wind', + 'wind-ons':'Land-based wind', + + 'nuclear':'Nuclear', 'nuclear-smr':'Nuclear', + } +} +aggcolors = { + 'Nuclear':'C3', + + 'Coal':plt.cm.binary(1.0), + 'Gas':plt.cm.tab20(8), + 'Coal+CCS':'C7', + 'Gas+CCS':plt.cm.tab20(9), + + 'Hydro': techcolors['hydro'], + 'Geothermal': techcolors['geothermal'], + 'Canadian imports': techcolors['dr'], + + # 'Bio/BECCS':plt.cm.tab20(4), + # 'H2 turbine':plt.cm.tab20(5), + 'Bio/BECCS':techcolors['biopower'], + 'H2 turbine':techcolors['h2-ct'], + + 'Land-based wind':techcolors['wind-ons'], + 'Offshore wind':techcolors['wind-ofs'], + + 'CSP':techcolors['csp'], + 'PV':techcolors['upv'], + 'PVB':techcolors['pvb'], + + # 'Storage':plt.cm.tab20(12), + # 'Storage|charge':plt.cm.tab20(12), + # 'Storage|discharge':plt.cm.tab20(12), + 'Storage':techcolors['battery_8'], + 'Storage|charge':techcolors['battery_8'], + 'Storage|discharge':techcolors['battery_8'], +} +aggtechs_disagg = aggstack.copy() +for k,v in aggstack.items(): + if v == 'Storage': + aggtechs_disagg[k+'|charge'] = 'Storage|charge' + aggtechs_disagg[k+'|discharge'] = 'Storage|discharge' + + +if len(cases) <= 4: + plt.close() + f,ax = plt.subplots(figsize=(width, 5)) + + ### Final capacity and generation + datum = 'Capacity' + data = { + 'data': dictin_cap, + 'values':'Capacity (GW)', + 'label':f' {lastyear} Capacity [GW]', + } + ax.set_ylabel(data['label']) + dfplot = pd.concat( + {case: + data['data'][case].loc[data['data'][case].year==lastyear] + .set_index('tech')[data['values']] + for case in cases}, + axis=1, + ).T + dfplot = dfplot.rename(columns=aggstack).groupby(axis=1, level='tech').sum() + unmapped = [c for c in dfplot if c not in aggcolors] + if len(unmapped): + raise Exception(f"Unmapped techs: {unmapped}") + dfplot = ( + dfplot[[c for c in aggcolors if c in dfplot]] + .round(3).replace(0,np.nan).dropna(axis=1, how='all').fillna(0) + ) + mindistance = dfplot.sum(axis=1).max() / 20 + dfcumsum = dfplot.cumsum(axis=1) + dfdiff = dfplot - dfplot.loc[dfplot.index.map(basemap)].values + + ## Absolute and difference + plots.stackbar(df=dfplot, ax=ax, colors=aggcolors, width=barwidth, net=False) + + ### Labels + for x, case in 
enumerate(cases): + labels = (dfcumsum.loc[case] - dfplot.loc[case]/2).rename('middle').to_frame() + try: + labels['ylabel'] = plots.optimize_label_positions( + ydata=labels.middle.values, mindistance=mindistance, ypad=0, + ) + except ValueError: + labels['ylabel'] = labels['middle'].values + labels['yval'] = labels.index.map(dfplot.loc[case]) + for i, row in labels.iterrows(): + ## Draw the line + ax.annotate( + '', + xy=(x+barwidth/2, row.middle), + xytext=(x+barwidth/2+labelpad, row.ylabel), + arrowprops={ + 'arrowstyle':'-', 'shrinkA':0, 'shrinkB':0, + 'color':aggcolors[i], 'lw':0.5}, + annotation_clip=False, + ) + ## Write the label + diff = np.around(dfdiff.loc[case,i], 0) + ax.annotate( + # f"{row.yval:.0f} {i}" + (f" ({diff:+.0f})" if diff else ''), + ( + f"{row.yval:.0f}" + + (f" {i}" if case == list(cases.keys())[-1] else '') + + (f" ({diff:+.0f})" if diff else '') + ), + (x+barwidth/2+labelpad+0.01, row.ylabel), + va='center', ha='left', fontsize=9, color=aggcolors[i], + weight=('bold' if abs(diff) >= 100 else 'normal'), + annotation_clip=False, + ) + + ### Formatting + ax.set_xticks(range(len(cases))) + ax.set_xticklabels(cases.keys(), rotation=45, rotation_mode='anchor', ha='right') + ax.yaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(5)) + plt.tight_layout() + plots.despine(ax) + plt.draw() + ### Save it + slide = add_to_pptx('Capacity stacks', width=width) + if interactive: + plt.show() + + +#%%### Hodgepodge: Final capacity, final generation, final transmission, runtime +width = max(11, len(cases)*1.3) +plt.close() +f,ax = plt.subplots( + 2, 4, figsize=(width, 6.88), sharex=True, + sharey=('col' if (sharey is True) else False), +) +handles = {} + +### Final capacity and generation +toplot = { + 'Capacity': { + 'data': dictin_cap, + 'values':'Capacity (GW)', + 'label':f'{lastyear} Capacity [GW]'}, + 'Generation': { + 'data': dictin_gen, + 'values':'Generation (TWh)', + 'label':f'{lastyear} Generation [TWh]'}, +} +ax[0,1].axhline(0, c='k', ls='--', lw=0.75) +for col, (datum, data) in enumerate(toplot.items()): + ax[0,col].set_ylabel(data['label'], y=-0.075) + dfplot = pd.concat( + {case: + data['data'][case].loc[data['data'][case].year==lastyear] + .set_index('tech')[data['values']] + for case in cases}, + axis=1, + ).T + dfplot = ( + dfplot[[c for c in bokehcolors.index if c in dfplot]] + .round(3).replace(0,np.nan).dropna(axis=1, how='all') + ) + + handles[datum] = plot_bars_abs_stacked( + dfplot=dfplot, basecase=basemap, + colors=techcolors, fontsize=8, + ax=ax, col=col, net=(True if datum == 'Generation' else False), label=True, + ) + +### Total transmission +col = 2 +ax[0,col].set_ylabel(f'{lastyear} Transmission capacity [TW-mi]', y=-0.075) +dftrans = pd.concat({ + case: + dictin_trans[case].groupby(['year','trtype'])['Amount (GW-mi)'].sum() + .unstack('trtype') + .reindex(allyears).interpolate('linear') + / 1e3 + for case in cases +}, axis=1).loc[lastyear].unstack('trtype') + +handles['Transmission'] = plot_bars_abs_stacked( + dfplot=dftrans, basecase=basemap, + colors=colors_trans, + ax=ax, col=col, net=False, label=True, +) + +### Runtime +col = 3 +ax[0,col].set_ylabel('Runtime [hours]', y=-0.075) +dfplot = pd.concat( + {case: dictin_runtime[case].groupby('process').processtime.sum() for case in cases}, + axis=1).T +dfplot = dfplot[[c for c in colors_time.index if c in dfplot]].copy() + +handles['Runtime'] = plot_bars_abs_stacked( + dfplot=dfplot, basecase=basemap, + colors=colors_time, + ax=ax, col=col, net=False, label=True, +) + +### Formatting +for col in 
range(4): + ax[1,col].set_xticks(range(len(cases))) + ax[1,col].set_xticklabels(cases.keys(), rotation=90) + ax[1,col].annotate('Diff', (0.03,0.03), xycoords='axes fraction', fontsize='large') + ax[1,col].axhline(0, c='k', ls='--', lw=0.75) +plt.tight_layout() +plots.despine(ax) +plt.draw() +### Save it +slide = add_to_pptx('Capacity, generation, transmission, runtime', width=width) +if interactive: + plt.show() + +### Add legends as separate figure below the slide +plt.close() +f,ax = plt.subplots(1, 4, figsize=(11, 0.1)) +for col, datum in enumerate(handles): + leg = ax[col].legend( + handles=handles[datum][::-1], loc='upper center', bbox_to_anchor=(0.5,1.0), + fontsize='medium', ncol=1, frameon=False, + handletextpad=0.3, handlelength=0.7, columnspacing=0.5, + ) + ax[col].axis('off') +add_to_pptx(slide=slide, width=width, top=7.5) + + +#%% Costs: NPV of system cost, NPV of climate + health costs +simple_npv = False +width = max(11, len(cases)*1.3) +plt.close() +f,ax = plt.subplots( + 2, 3, figsize=(width, 6), sharex=True, + sharey=('col' if (sharey is True) else False), +) +handles = {} + +### NPV of system cost +col = 0 +ax[0,col].set_ylabel('NPV of system cost [$B]', y=-0.075) +ax[0,col].axhline(0, c='k', ls='--', lw=0.75) +dfcost_npv = pd.concat(dictin_npv, axis=1).fillna(0).T +dfcost_npv = dfcost_npv[[c for c in bokehcostcolors.index if c in dfcost_npv]].copy() +if simple_npv: + dfcost_npv = dfcost_npv.sum(axis=1) + dfcost_npv = pd.concat([pd.Series({case:dfcost_npv[case]}, name=case).to_frame() for case in cases]) + +handles['NPV'] = plot_bars_abs_stacked( + dfplot=dfcost_npv, basecase=basemap, + colors=bokehcostcolors, + ax=ax, col=col, net=(not simple_npv), label=True, +) + +### NPV of climate and health costs +col = 1 +ax[0,col].set_ylabel('NPV of climate + health cost [$B]', y=-0.075) + +dfsocial = {} +for case in cases: + dfsocial[case] = ( + dictin_emissions[case].reindex(allyears).interpolate('linear') + * scghg_central + )[['CO2','CH4']].dropna() / 1e9 + dfsocial[case]['health'] = dictin_health_central[case].reindex(allyears).interpolate('linear') +dfsocial = pd.concat(dfsocial, axis=1) + +dfsocial_npv = dfsocial.multiply(discounts, axis=0).dropna().sum().unstack('e') + +handles['social'] = plot_bars_abs_stacked( + dfplot=dfsocial_npv, basecase=basemap, + colors=colors_social, + ax=ax, col=col, net=True, label=True, +) + +### Combined +col = 2 +ax[0,col].set_ylabel('NPV of system\n+ climate + health cost [$B]', y=-0.075) +dfcombo_npv = pd.concat([dfcost_npv, dfsocial_npv], axis=1) + +handles['combo'] = plot_bars_abs_stacked( + dfplot=dfcombo_npv, basecase=basemap, + colors={**bokehcostcolors.to_dict(), **colors_social}, + ax=ax, col=col, net=True, label=True, +) + +### Formatting +for col in range(3): + ax[1,col].set_xticks(range(len(cases))) + ax[1,col].set_xticklabels(cases.keys(), rotation=90) + ax[1,col].annotate('Diff', (0.03,0.03), xycoords='axes fraction', fontsize='large') + ax[1,col].axhline(0, c='k', ls='--', lw=0.75) + ## Add commas to y axis labels + if max([abs(i) for i in ax[0,col].get_ylim()]) >= 10000: + ax[0,col].yaxis.set_major_formatter(mpl.ticker.StrMethodFormatter('{x:,.0f}')) +plt.tight_layout() +plots.despine(ax) +plt.draw() +### Save it +slide = add_to_pptx('NPV of system, climate, health costs', width=width) +if interactive: + plt.show() + +### Add legends as separate figure below the slide +plt.close() +f,ax = plt.subplots(1, 4, figsize=(11, 0.1)) +for col, datum in enumerate(handles): + leg = ax[col].legend( + handles=handles[datum][::-1], 
loc='upper center', bbox_to_anchor=(0.5,1.0), + fontsize='medium', ncol=1, frameon=False, + handletextpad=0.3, handlelength=0.7, columnspacing=0.5, + ) +for col in range(4): + ax[col].axis('off') +add_to_pptx(slide=slide, width=width, top=7.5) + + +#%% Simplifed NPV +width = len(cases)*1.3 + 2 +plt.close() +f,ax = plt.subplots( + 2, 3, figsize=(width, 6), sharex=True, + sharey=('col' if (sharey is True) else False), +) +handles = {} + +### NPV of system cost +col = 0 +ax[0,col].set_ylabel('NPV of system cost [$B]', y=-0.075) +ax[0,col].axhline(0, c='k', ls='--', lw=0.75) +dfcost_npv = pd.concat(dictin_npv, axis=1).fillna(0).T.sum(axis=1) + +def twobars(dfplot, basecase, colors, ax, col=0, ypad=0.02): + if isinstance(basecase, str): + dfdiff = dfplot - dfplot.loc[basecase] + elif isinstance(basecase, list): + dfdiff = dfplot - dfplot.loc[basecase].values + elif isinstance(basecase, dict): + dfdiff = dfplot - dfplot.loc[basecase.values()].values + + for (row, df) in enumerate([dfplot, dfdiff]): + ax[row,col].bar( + range(len(df)), df.values, + color=[colors[c] for c in dfplot.index], + width=0.8, + ) + ymin, ymax = ax[row,col].get_ylim() + _ypad = (ymax - ymin) * ypad + if ymin < 0: + ax[row,col].set_ylim(ymin * (1+ypad)) + ## label net value + for x, case in enumerate(df.index): + val = df.loc[case].sum() + ax[row,col].annotate( + f'{val:.0f}', (x, val - _ypad), ha='center', va='top', color='k', size=9, + path_effects=[pe.withStroke(linewidth=2.0, foreground='w', alpha=0.7)], + ) + +twobars(dfplot=dfcost_npv, basecase=basemap, colors=colors, ax=ax, col=col) + +### NPV of climate and health costs +col = 1 +ax[0,col].set_ylabel('NPV of climate + health cost [$B]', y=-0.075) + +dfsocial = {} +for case in cases: + dfsocial[case] = ( + dictin_emissions[case].reindex(allyears).interpolate('linear') + * scghg_central + )[['CO2','CH4']].dropna() / 1e9 + dfsocial[case]['health'] = dictin_health_central[case].reindex(allyears).interpolate('linear') +dfsocial = pd.concat(dfsocial, axis=1) + +dfsocial_npv = dfsocial.multiply(discounts, axis=0).dropna().sum().unstack('e').sum(axis=1) + +twobars(dfplot=dfsocial_npv, basecase=basemap, colors=colors, ax=ax, col=col) + +### Combined +col = 2 +ax[0,col].set_ylabel('NPV of system\n+ climate + health cost [$B]', y=-0.075) +dfcombo_npv = pd.concat([dfcost_npv, dfsocial_npv], axis=1).sum(axis=1) + +twobars(dfplot=dfcombo_npv, basecase=basemap, colors=colors, ax=ax, col=col) + +### Formatting +for col in range(3): + ax[1,col].set_xticks(range(len(cases))) + ax[1,col].set_xticklabels(cases.keys(), rotation=90) + ax[1,col].annotate('Diff', (0.03,0.03), xycoords='axes fraction', fontsize='large') + ax[1,col].axhline(0, c='k', ls='--', lw=0.75) + ## Add commas to y axis labels + if max([abs(i) for i in ax[0,col].get_ylim()]) >= 10000: + ax[0,col].yaxis.set_major_formatter(mpl.ticker.StrMethodFormatter('{x:,.0f}')) +plt.tight_layout() +plots.despine(ax) + +### Save it +slide = add_to_pptx('NPV of system, climate, health costs', width=min(width, 13.33)) +if interactive: + plt.show() + + +#%%### SCOE, NEUE +width = 9 + len(cases)*0.5 +plt.close() +f,ax = plt.subplots( + 1, 4, figsize=(width, 4.5), + gridspec_kw={'wspace':0.7, 'width_ratios':[1,1,1,len(cases)*0.25]}, +) + +### SCOE +col = 0 +dfscoe = {} +for case in cases: + dfscoe[case] = dictin_scoe[case].groupby('year')['Average cost ($/MWh)'].sum().loc[years] + ax[col].plot(dfscoe[case].index, dfscoe[case].values, label=case, color=colors[case]) +ax[col].set_ylim(0) +ax[col].set_ylabel('System cost of electricity 
[$/MWh]') +dfscoe = pd.concat(dfscoe, axis=1) +## annotate the last value +plots.label_last(dfscoe, ax[col], colors=colors, extend='below') + +### Undiscounted annualized system cost +col = 1 +dfsyscost = {} +for case in cases: + dfsyscost[case] = dictin_syscost[case].sum(axis=1).loc[startyear:lastyear] + ax[col].plot(dfsyscost[case].index, dfsyscost[case].values, label=case, color=colors[case]) +ax[col].set_ylim(0) +ax[col].set_ylabel('Annualized system cost [$B/year]') +dfsyscost = pd.concat(dfsyscost, axis=1) +## annotate the last value +plots.label_last(dfsyscost, ax[col], colors=colors, extend='below') + +### NEUE +col = 2 +dfneue = {} +for case in cases: + if case in dictin_neue: + dfneue[case] = dictin_neue[case].reindex([y for y in years if y >= 2025]) + ax[col].plot(dfneue[case].index, dfneue[case].values, label=case, color=colors[case]) +ax[col].set_ylim(0) +if ax[col].get_ylim()[1] >= 10: + ax[col].axhline(10, c='C7', ls='--', lw=0.75) +ax[col].set_ylabel('NEUE [ppm]') +## annotate the last value +if len(dfneue): + dfneue = pd.concat(dfneue, axis=1) + plots.label_last(dfneue, ax[col], colors=colors, extend='below') + +### Spares +col = 3 +ypad = 0.02 +ax[col].set_ylabel('NPV of system cost [$billion]') +# ax[3].axis('off') +ax[col].bar( + range(len(dfcost_npv)), dfcost_npv.values, + color=[colors[c] for c in dfplot.index], + width=0.8, +) +ymin, ymax = ax[col].get_ylim() +_ypad = (ymax - ymin) * ypad +if ymin < 0: + ax[col].set_ylim(ymin * (1+ypad)) +## label net value +for x, case in enumerate(dfcost_npv.index): + val = dfcost_npv.loc[case].sum() + if len(cases) <= 10: + ax[col].annotate( + f'{np.around(val,-1):.0f}', + (x, val - _ypad), ha='center', va='top', color='k', size=9, + path_effects=[pe.withStroke(linewidth=2.0, foreground='w', alpha=0.8)], + ) + else: + ax[col].annotate( + f'{np.around(val,-1):.0f}', + (x, val + _ypad), ha='center', va='bottom', color='k', size=9, + rotation=90, + ) + +ax[col].set_xticks(range(len(cases))) +ax[col].set_xticklabels(cases, rotation=45, rotation_mode='anchor', ha='right') + +### Legend +leg = ax[0].legend( + loc='upper left', bbox_to_anchor=(-0.3,-0.05), frameon=False, fontsize='large', + handletextpad=0.3, handlelength=0.7, +) +for legobj in leg.legend_handles: + legobj.set_linewidth(8) + legobj.set_solid_capstyle('butt') + +### Formatting +# plt.tight_layout() +plots.despine(ax) +plt.draw() +for col in [0,1] + ([2] if len(dictin_neue) else []): + ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + plots.shorten_years(ax[col]) +### Save it +slide = add_to_pptx('Cost, reliability', width=width) +if interactive: + plt.show() + + +#%% Emissions, health +width = 12 +plt.close() +f,ax = plt.subplots(1, 4, figsize=(width, 4.5), gridspec_kw={'wspace':0.6}) + +### CO2 emissions +for col, pollutant in enumerate(['CO2','CO2e']): + note = [] + df = {} + for case in cases: + df[case] = dictin_emissions[case].reindex(years).fillna(0) / 1e6 # tonne->MMT + emissions_allyears = df[case].reindex(allyears).interpolate('linear').loc[startyear_notes:] + ax[col].plot(df[case].index, df[case][pollutant].values, label=case, color=colors[case]) + ## collect more notes + lives = dictin_health_central_mort[case].reindex(allyears).interpolate('linear').sum() + note.append( + f"{case:<{maxlength}} | {startyear_notes}–{lastyear}: " + + f"{emissions_allyears['CO2'].sum()/1e3:.2f} GT CO2" + + f"; {emissions_allyears['CO2e'].sum()/1e3:.2f} GT CO2e" + + f"; {lives:,.0f} lives" + ) + 
ax[col].set_ylim(0) + ax[col].set_ylabel(f"{pollutant.replace('CO2e','CO2(e)')} emissions [MMT/year]") + ## annotate the last value + df = pd.concat(df, axis=1).xs(pollutant, 1, 'e') + plots.label_last(df, ax[col], colors=colors, extend='both') + +## Notes +ax[0].axhline(phaseout_trigger, c='C7', ls='--', lw=0.75) +ax[1].annotate( + '\n'.join(note), (-0.2, -0.1), xycoords='axes fraction', + annotation_clip=False, fontsize=9, fontfamily='monospace', va='top', ha='left', +) +print('\n'.join(note)) + +### Health impacts - mortality +col = 2 +dfmort = {} +for case in cases: + if case in dictin_health_central_mort: + dfmort[case] = dictin_health_central_mort[case].loc[years] + ax[col].plot(dfmort[case].index, dfmort[case].values, label=case, color=colors[case]) +ax[col].set_ylim(0) +ax[col].set_ylabel('Mortality [lives/year]') +## annotate the last value +dfmort = pd.concat(dfmort, axis=1) +plots.label_last(dfmort, ax[col], colors=colors, extend='both') + +### Health impacts - dollars +col = 3 +dfhealth = {} +for case in cases: + if case in dictin_health_central: + dfhealth[case] = dictin_health_central[case].loc[years] + ax[col].plot(dfhealth[case].index, dfhealth[case].values, label=case, color=colors[case]) +ax[col].set_ylim(0) +ax[col].set_ylabel('Health costs [$B/year]') +## annotate the last value +dfhealth = pd.concat(dfhealth, axis=1) +plots.label_last(dfhealth, ax[col], colors=colors, extend='both') + +### Legend +leg = ax[0].legend( + loc='upper left', bbox_to_anchor=(-0.3,-0.05), frameon=False, fontsize='large', + handletextpad=0.3, handlelength=0.7, +) +for legobj in leg.legend_handles: + legobj.set_linewidth(8) + legobj.set_solid_capstyle('butt') + +### Formatting +# plt.tight_layout() +plots.despine(ax) +plt.draw() +for col in range(4): + ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + plots.shorten_years(ax[col]) +### Save it +slide = add_to_pptx('Emissions', width=width) +if interactive: + plt.show() + + +#%%### Generation fraction +ycol = 'Generation (TWh)' +stortechs = [f'battery_{i}' for i in [2,4,6,8,10]] + ['pumped-hydro'] +vretechs = ['upv','wind-ons','wind-ofs','distpv','csp'] +retechs = vretechs + ['hydro','geothermal','biopower'] +zctechs = vretechs + ['hydro','geothermal','nuclear','nuclear-smr'] +fossiltechs = ['coal','coal-ccs_mod','gas-cc','gas-cc-ccs_mod','gas-ct','o-g-s','cofire'] +reccsnuctechs = retechs + ['coal-ccs_mod','gas-cc-ccs_mod','nuclear','nuclear-smr'] + +dftotal = pd.concat({ + case: + dictin_gen[case].loc[~dictin_gen[case].tech.isin(stortechs)].groupby('year')[ycol].sum() + for case in cases +}, axis=1) + +dfvre = pd.concat({ + case: + dictin_gen[case].loc[dictin_gen[case].tech.isin(vretechs)].groupby('year')[ycol].sum() + for case in cases +}, axis=1) + +dfre = pd.concat({ + case: + dictin_gen[case].loc[dictin_gen[case].tech.isin(retechs)].groupby('year')[ycol].sum() + for case in cases +}, axis=1) + +dfzc = pd.concat({ + case: + dictin_gen[case].loc[dictin_gen[case].tech.isin(zctechs)].groupby('year')[ycol].sum() + for case in cases +}, axis=1) + +dffossil = pd.concat({ + case: + dictin_gen[case].loc[dictin_gen[case].tech.isin(fossiltechs)].groupby('year')[ycol].sum() + for case in cases +}, axis=1) + +dfreccsnuc = pd.concat({ + case: + dictin_gen[case].loc[dictin_gen[case].tech.isin(reccsnuctechs)].groupby('year')[ycol].sum() + for case in cases +}, axis=1) + +dfplot = { + 'VRE share [%]': (dfvre / dftotal * 100).loc[startyear:], + 'RE share [%]': (dfre / dftotal * 
100).loc[startyear:], + 'Zero carbon share [%]': (dfzc / dftotal * 100).loc[startyear:], + 'RE + Nuclear + CCS share [%]': (dfreccsnuc / dftotal * 100).loc[startyear:], + 'Fossil share [%]': (dffossil / dftotal * 100).loc[startyear:], +} + +### Plot them +plt.close() +f,ax = plt.subplots(1, 5, figsize=(13.33, 4.5), gridspec_kw={'wspace':0.7}) +for col, (ylabel, df) in enumerate(dfplot.items()): + ax[col].set_ylabel(ylabel, labelpad=-4) + ax[col].set_ylim(0,100) + ax[col].yaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + for case in cases: + ax[col].plot(df.index, df[case].values, label=case, color=colors[case]) + ## annotate the last value + plots.label_last(df, ax[col], mindistance=3.5, colors=colors, extend='both', tail='%') + +### Legend +leg = ax[0].legend( + loc='upper left', bbox_to_anchor=(-0.3,-0.05), frameon=False, fontsize='large', + handletextpad=0.3, handlelength=0.7, +) +for legobj in leg.legend_handles: + legobj.set_linewidth(8) + legobj.set_solid_capstyle('butt') + +### Formatting +plots.despine(ax) +plt.draw() +for col in range(len(dfplot)): + ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + plots.shorten_years(ax[col]) +### Save it +slide = add_to_pptx('Generation share') +if interactive: + plt.show() + + +#%%### Firm capacity and capacity credit +capcreditcases = [c for c in cases if int(dictin_sw[c].GSw_PRM_CapCredit)] +if len(capcreditcases): + capcredittechs = ['wind-ons','upv','storage'] + ccseasons = ['hot','cold'] + cccols = len(ccseasons) + len(capcredittechs) + handles = {} + + plt.close() + f,ax = plt.subplots( + 2, cccols, figsize=(11, 6.88), sharex='col', + sharey=('col' if (sharey is True) else False), + ) + ### Firm capacity stack + dfplot = pd.concat( + {case: + dictin_cap_firm[case].loc[ + dictin_cap_firm[case].t==lastyear + ].groupby(['ccseason','i']).MW.sum().rename('GW') / 1e3 + for case in capcreditcases}, + axis=1, + ).T + for col, ccseason in enumerate(ccseasons): + ax[0,col].set_ylabel(f'Firm capacity, {ccseason} [GW]', y=-0.075) + + df = dfplot[ccseason][[c for c in bokehcolors.index if c in dfplot[ccseason]]].copy() + + handles[ccseason] = plot_bars_abs_stacked( + dfplot=df, basecase=basecase, + colors=techcolors, + ax=ax, col=col, net=False, label=False, + ) + + ### Average capacity credit by technology + for _col, tech in enumerate(capcredittechs): + col = _col + len(ccseasons) + if tech == 'storage': + techs = [f'battery_{i}' for i in [2,4,6,8,10]] + ['pumped-hydro'] + else: + techs = [tech] + for case in capcreditcases: + cap_firm = ( + dictin_cap_firm[case] + .loc[dictin_cap_firm[case].i.isin(techs)] + .groupby(['ccseason','t']).MW.sum().unstack('ccseason') / 1e3 + ) + cap_total = ( + dictin_cap[case] + .loc[dictin_cap[case].tech.isin(techs)] + .groupby('year')['Capacity (GW)'].sum().rename('GW').loc[2025:] + ) + capcredit = cap_firm.divide(cap_total, axis=0).dropna() + # for ccseason in lss: + for row, ccseason in enumerate(['hot','cold']): + ax[row,col].plot( + capcredit.index, capcredit[ccseason].values, + color=colors[case], label=case, + ) + + ### Legend + leg = ax[1,len(ccseasons)].legend( + loc='upper left', bbox_to_anchor=(-0.3,-0.07), frameon=False, fontsize='large', + handletextpad=0.3, handlelength=0.7, + ) + for legobj in leg.legend_handles: + legobj.set_linewidth(8) + legobj.set_solid_capstyle('butt') + + ### Formatting + for col in range(2): + ax[1,col].set_xticks(range(len(capcreditcases))) + ax[1,col].set_xticklabels(capcreditcases, 
rotation=90) + ax[1,col].annotate('Diff', (0.03,0.03), xycoords='axes fraction', fontsize='large') + ax[1,col].axhline(0, c='k', ls='--', lw=0.75) + for _col, tech in enumerate(capcredittechs): + col = _col + len(ccseasons) + ax[0,col].set_ylabel(f'Capacity credit, {tech} [fraction]', y=-0.075) + for row, ccseason in enumerate(['hot','cold']): + ax[row,col].set_ylim(0,1) + ax[row,col].annotate( + ccseason.title(), (0.5, 1.0), xycoords='axes fraction', + fontsize='large', weight='bold', va='top', ha='center' + ) + + plt.tight_layout() + plots.despine(ax) + plt.draw() + for col in range(len(ccseasons),cccols): + ax[1,col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[1,col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + plots.shorten_years(ax[1,col]) + + ### Save it + slide = add_to_pptx('Firm capacity, capacity credit') + if interactive: + plt.show() + + +#%%### Transmission +startyear_transgrowth = int(scalars.firstyear_trans_longterm) +for interzonal_only in [False, True]: + if interzonal_only: + labelline = 'Interzonal transmission [TW-mi]' + labelbar = f'{lastyear} Interzonal transmission [TW-mi]' + labelgrowth = f'Interzonal transmission growth,\n{startyear_transgrowth}–{lastyear} [TWmi/year]' + else: + labelline = 'Transmission capacity [TW-mi]' + labelbar = f'{lastyear} Transmission capacity [TW-mi]' + labelgrowth = f'Transmission growth,\n{startyear_transgrowth}–{lastyear} [TWmi/year]' + + plt.close() + f,ax = plt.subplots(1, 4, figsize=(11, 4.5), gridspec_kw={'wspace':0.6}) + + ### Transmission TW-miles over time + for case in cases: + if interzonal_only: + df = ( + dictin_trans[case] + .loc[~dictin_trans[case].trtype.str.lower().isin(['spur','reinforcement'])] + .groupby('year')['Amount (GW-mi)'].sum().reindex(years) / 1e3 + ) + else: + df = dictin_trans[case].groupby('year').sum()['Amount (GW-mi)'].reindex(years) / 1e3 + ax[0].plot(df.index, df.values, label=case, color=colors[case]) + ## annotate the last value + val = np.around(df.loc[max(years)], 0) + 0 + ax[0].annotate( + f' {val:.0f}', + (max(years), val), ha='left', va='center', + color=colors[case], fontsize='medium', + ) + ax[0].set_ylim(0) + ax[0].set_ylabel(labelline) + + ### Disaggregated transmission (for next two plots) + dftrans = pd.concat({ + case: + dictin_trans[case].groupby(['year','trtype'])['Amount (GW-mi)'].sum() + .unstack('trtype') + .reindex(allyears).interpolate('linear') + / 1e3 + for case in cases + }, axis=1) + if interzonal_only: + dftrans = ( + dftrans[[c for c in dftrans if c[1].lower() not in ['spur','reinforcement']]] + ).copy() + + ### Disaggregated final year transmission capacity + df = dftrans.loc[lastyear].unstack('trtype') + plots.stackbar(df=df, ax=ax[1], colors=colors_trans, width=0.8, net=False) + ax[1].set_ylabel(labelbar) + + ### Transmission growth + dftransgrowth = ( + (dftrans.loc[lastyear] - dftrans.loc[startyear_transgrowth]) + / (lastyear - startyear_transgrowth) + ).unstack('trtype') + plots.stackbar(df=dftransgrowth, ax=ax[2], colors=colors_trans, width=0.8, net=False) + ax[2].set_ylabel(labelgrowth) + ax[2].set_ylim(0, max(ax[2].get_ylim()[1], 3.8)) + ## Scales + ymax = ax[2].get_ylim()[1] + scales = { + ## https://cigreindia.org/CIGRE%20Lib/CIGRE%20Session%202010%20paper/B4_306_2010.pdf + 1476 * 6.3 / 1e3: '1× Rio Madeira per year', + } + ## DOE LBWMR + if interzonal_only: + scales[0.73] = 'Mean since 2014 (345+ kV)' + scales[1.46] = 'Max since 2014 (345+ kV)' + scales[3.42] = 'Max since 2009 (345+ kV)' + if not interzonal_only: + scales[0.96] = 
'Mean since 2014 (all kV)' + scales[1.83] = 'Max since 2014 (all kV)' + scales[3.64] = 'Max since 2009 (all kV)' + + for y, label in scales.items(): + if y > ymax: + continue + ax[2].annotate( + label, xy=(len(cases), y), xytext=(len(cases)+1, y), annotation_clip=False, + arrowprops={'arrowstyle':'-|>', 'color':'k'}, + ha='left', va='center', color='k', fontsize=11, + ) + ax[2].axhline( + y, c='k', lw=0.5, ls='--', + path_effects=[pe.withStroke(linewidth=1.5, foreground='w', alpha=0.5)]) + + ### Spare + ax[3].axis('off') + + ### Legends + ## Traces + _h, _l = ax[0].get_legend_handles_labels() + leg = ax[0].legend( + # _h[::-1], _l[::-1], + loc='upper left', bbox_to_anchor=(-0.4,-0.05), frameon=False, fontsize='large', + handletextpad=0.3, handlelength=0.7, + ) + for legobj in leg.legend_handles: + legobj.set_linewidth(8) + legobj.set_solid_capstyle('butt') + ## Transmission types + handles = [ + mpl.patches.Patch(facecolor=colors_trans[i], edgecolor='none', label=i) + for i in colors_trans.index if i in dftrans.columns.get_level_values('trtype') + ] + leg = ax[2].legend( + handles=handles[::-1], + loc='upper left', bbox_to_anchor=(1,-0.05), frameon=False, fontsize='large', + handletextpad=0.3, handlelength=0.7, + ) + + ### Formatting + for col in [1,2]: + ax[col].set_xticks(range(len(cases))) + ax[col].set_xticklabels(cases.keys(), rotation=90) + ax[col].yaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(5)) + + plots.despine(ax) + plt.draw() + for col in [0]: + ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + plots.shorten_years(ax[col]) + ### Save it + slide = add_to_pptx( + 'Interzonal transmission' if interzonal_only else 'Transmission (all types)') + if interactive: + plt.show() + + +#%%### More transmission plot styles +ylabels = { + 'transgrp': 'Transmission capacity between\nplanning regions [GW]', + 'transreg': 'Interregional transmission\ncapacity [GW]', + 'interconnect': 'Interconnection-seam-crossing\ntransmission capacity [GW]', +} + +plt.close() +f,ax = plt.subplots(1, 4, figsize=(11, 4.5), gridspec_kw={'wspace':0.9}) + +### All transmission [TW-mi] +### Interzonal ("long-distance") [TW-mi] +for col, interzonal_only in enumerate([False,True]): + if interzonal_only: + labelline = 'Interzonal transmission capacity\n[TW-mi]' + else: + labelline = 'Total transmission capacity\n[TW-mi]' + + df = {} + for case in cases: + if interzonal_only: + df[case] = ( + dictin_trans[case] + .loc[~dictin_trans[case].trtype.str.lower().isin(['spur','reinforcement'])] + .groupby('year')['Amount (GW-mi)'].sum().reindex(years) / 1e3 + ) + else: + df[case] = dictin_trans[case].groupby('year').sum()['Amount (GW-mi)'].reindex(years) / 1e3 + ax[col].plot(df[case].index, df[case].values, label=case, color=colors[case]) + ax[col].set_ylim(0) + ax[col].set_ylabel(labelline) + ## annotate the last value + df = pd.concat(df, axis=1) + plots.label_last(df, ax[col], colors=colors, extend='both') + ## Annotate the first value + plots.annotate( + ax[col], list(cases.keys())[0], 2020, (10,-10), + color='C7', arrowprops={'arrowstyle':'-|>','color':'C7'}) +ax[1].set_ylim(0, ax[0].get_ylim()[1]) + +### Interregional (FERC regions) +### Interconnection-seam-crossing +for _col, level in enumerate(['transreg','interconnect']): + col = _col + 2 + df = {} + for case in cases: + df[case] = dictin_trans_r[case].loc[ + dictin_trans_r[case][f'inter_{level}'] == 1 + ].groupby('t').MW.sum().reindex(years).fillna(0) / 1e3 + 
ax[col].plot(df[case].index, df[case].values, label=case, color=colors[case]) + ax[col].set_ylim(0) + ax[col].set_ylabel(ylabels[level]) + ## annotate the last value + df = pd.concat(df, axis=1) + plots.label_last(df, ax[col], colors=colors, extend='both') + ## Annotate the first value + plots.annotate( + ax[col], list(cases.keys())[0], 2020, (10,(-10 if col == 2 else 10)), + color='C7', arrowprops={'arrowstyle':'-|>','color':'C7'}, + decimals=(0 if df[case][2020] >= 10 else 1), + ) +ax[3].set_ylim(0, ax[2].get_ylim()[1]) + +### Legends +## Traces +_h, _l = ax[0].get_legend_handles_labels() +leg = ax[0].legend( + _h[::-1], _l[::-1], + # loc='lower left', bbox_to_anchor=(-0.02,-0.02), + loc='upper left', bbox_to_anchor=(-0.4,-0.05), + frameon=False, fontsize='large', + handletextpad=0.3, handlelength=0.7, +) +for legobj in leg.legend_handles: + legobj.set_linewidth(8) + legobj.set_solid_capstyle('butt') + +### Formatting +plots.despine(ax) +plt.draw() +for col in [0,1,2,3]: + ax[col].yaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(10)) + ax[col].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(2)) + plots.shorten_years(ax[col]) +### Save it +slide = add_to_pptx('Transmission at different resolutions') +if interactive: + plt.show() + + +#%%### Interregional transfer capability to peak demand ratio try: + f, ax, dfplot = reedsplots.plot_interreg_transfer_cap_ratio( + case=list(cases.values()), + colors={v: colors[k] for k,v in cases.items()}, + casenames={v:k for k,v in cases.items()}, + level='transreg', tstart=startyear, + ymax=None, + ) + ### Save it + slide = add_to_pptx( + 'Interregional transmission / peak demand', + height=(6.88 if ax.shape[1] <= 8 else None), + width=(13.33 if ax.shape[1] > 8 else None), + ) + if interactive: + plt.show() + +except Exception: + print(traceback.format_exc()) + + +#%%### Transmission maps +if len(cases) == 2: plt.close() f,ax = reedsplots.plot_trans_diff( - casebase=casebase, casecomp=casecomp, + casebase=casebase, + casecomp=casecomp, pcalabel=False, wscale=0.0004, subtract_baseyear=2020, yearlabel=True, - year=t, + year=lastyear, alpha=1, dpi=150, titleshorten=titleshorten, ) - # ax[0].set_xlim(-2.5e6,0.4e6) - add_to_pptx(f'Transmission ({t})') + add_to_pptx(f'Transmission ({lastyear})') if interactive: plt.show() -except Exception as err: - print(err) +else: + ### Absolute + wscale = 0.0003 + alpha = 0.8 + for subtract_baseyear in [None, 2020]: + plt.close() + f,ax = plt.subplots( + nrows, ncols, figsize=(13.33, 6.88), + gridspec_kw={'wspace':0.0,'hspace':-0.1}, + ) + for case in cases: + ### Plot it + reedsplots.plot_trans_onecase( + case=cases[case], pcalabel=False, wscale=wscale, + yearlabel=False, year=lastyear, simpletypes=None, + alpha=alpha, scalesize=8, + f=f, ax=ax[coords[case]], title=False, + subtract_baseyear=subtract_baseyear, + thickborders='transreg', drawstates=False, drawzones=False, + label_line_capacity=10, + scale=(True if case == basecase else False), + ) + ax[coords[case]].set_title(case) + ### Formatting + title = ( + f'New interzonal transmission since {subtract_baseyear}' if subtract_baseyear + else 'All interzonal transmission') + for row in range(nrows): + for col in range(ncols): + if nrows == 1: + ax[col].axis('off') + elif ncols == 1: + ax[row].axis('off') + else: + ax[row,col].axis('off') + ### Save it + slide = add_to_pptx(title) + if interactive: + plt.show() -#%%### Capacity diff maps -try: - ### Get the BA map - dfba = 
reedsplots.get_zonemap(casebase) - endpoints = ( - gpd.read_file(os.path.join(reeds_path,'inputs','shapefiles','transmission_endpoints')) - .set_index('ba_str')) + ### Difference + plt.close() + f,ax = plt.subplots( + nrows, ncols, figsize=(13.33, 6.88), + gridspec_kw={'wspace':0.0,'hspace':-0.1}, + ) + for case in cases: + ax[coords[case]].set_title(case) + if case == basecase: + ### Plot absolute + reedsplots.plot_trans_onecase( + case=cases[case], pcalabel=False, wscale=wscale, + yearlabel=False, year=lastyear, simpletypes=None, + alpha=alpha, scalesize=8, + f=f, ax=ax[coords[case]], title=False, + subtract_baseyear=subtract_baseyear, + thickborders='transreg', drawstates=False, drawzones=False, + label_line_capacity=10, + scale=(True if case == basecase else False), + ) + else: + ### Plot the difference + reedsplots.plot_trans_diff( + casebase=cases[basecase], casecomp=cases[case], + pcalabel=False, wscale=wscale, + yearlabel=False, year=lastyear, simpletypes=None, + alpha=alpha, + f=f, ax=ax[coords[case]], + subtract_baseyear=subtract_baseyear, + thickborders='transreg', drawstates=False, drawzones=False, + label_line_capacity=10, + scale=False, + ) + ### Formatting + title = 'Interzonal transmission difference' + for row in range(nrows): + for col in range(ncols): + if nrows == 1: + ax[col].axis('off') + elif ncols == 1: + ax[row].axis('off') + else: + ax[row,col].axis('off') + ### Save it + slide = add_to_pptx(title) + if interactive: + plt.show() + + +#%%### RA sharing +if detailed: + ralevel = 'transreg' + scale = 10 + wscale = 7e3 + dfmap = reedsplots.get_dfmap(cases[basecase]) + regions = dfmap[ralevel].bounds.minx.sort_values().index + + ### Calculate aggregated load + dictin_load_stress_agg = {} + for case in cases: + if int(dictin_sw[case].GSw_PRM_CapCredit): + hcol = 'ccseason' + df = dictin_peak_ccseason[case].copy() + else: + hcol = 'h' + df = dictin_load_stress[case].copy() + df = ( + df.assign(region=df.r.map(hierarchy[ralevel])) + .groupby(['t','region',hcol]).GW.sum() + .loc[lastyear].unstack('region') + ) + dictin_load_stress_agg[case] = df.sum() + # df['szn'] = df.index.map(lambda x: x.split('h')[0]) + # df = df.reset_index().set_index(['szn','h']) + # dictin_load_stress_agg[case] = df.copy() + + ### Calculate aggregated stress period flows + tran_flow_stress_agg = {} + for case in cases: + if int(dictin_sw[case].GSw_PRM_CapCredit): + df = dictin_prmtrade[case].copy() + hcol = 'ccseason' + else: + df = dictin_tran_flow_stress[case].copy() + hcol = 'h' + df['aggreg'] = df.r.map(hierarchy[ralevel]) + df['aggregg'] = df.rr.map(hierarchy[ralevel]) + df['interface'] = df.aggreg + '|' + df.aggregg + + df = ( + df + .loc[df.aggreg != df.aggregg] + .groupby(['t',hcol,'interface']).GW.sum().unstack('interface').fillna(0) + ) + if df.empty: + continue + else: + df = df.loc[lastyear].copy() + ## Order interfaces alphabetically + rename = {} + for interface in df: + r, rr = interface.split('|') + if r > rr: + rename[interface] = f'{rr}|{r}' + df[interface] *= -1 + df = df.rename(columns=rename).groupby(axis=1, level=0).sum() + ## Now reorder interfaces by flow + rename = {} + for interface in df: + r, rr = interface.split('|') + if df[interface].clip(lower=0).sum() < df[interface].clip(upper=0).abs().sum(): + rename[interface] = f'{rr}|{r}' + df[interface] *= -1 + tran_flow_stress_agg[case] = df.rename(columns=rename).copy() + + ### Calculate regional imports/exports + dfimportexport = {} + for case in cases: + df = {} + for region in regions: + df[region] = 
reedsplots.get_import_export( + region=region, df=tran_flow_stress_agg[case] + ) + dfimportexport[case] = pd.concat(df).sum(axis=1).unstack(level=0) + dfimportexport[case].columns = dfimportexport[case].columns.rename('region') + + ### Plot it + whiteout = dict(zip( + [f'C{i}' for i in range(10)], + [plt.cm.tab20(i*2+1) for i in range(10)] + )) + if any([v not in whiteout for v in list(colors.values())]): + whiteout = {v: (v[0], v[1], v[2], v[3]*0.7) for v in list(colors.values())} + + for label in ['max','average']: + plt.close() + f,ax = plt.subplots( + nrows, ncols, figsize=(13.33, 6.88), + gridspec_kw={'wspace':0.0,'hspace':-0.1}, + ) + + for case in cases: + ### Formatting + dfmap[ralevel].plot(ax=ax[coords[case]], facecolor='none', edgecolor='C7', lw=0.5) + dfmap['interconnect'].plot(ax=ax[coords[case]], facecolor='none', edgecolor='k', lw=1) + ax[coords[case]].set_title( + case, y=0.95, weight='bold', color=colors[case], fontsize=14) + ax[coords[case]].axis('off') + + ### RA flows + if case not in tran_flow_stress_agg: + continue + + if label == 'max': + ## Max flow + forwardwidth = tran_flow_stress_agg[case].clip(lower=0).max() + reversewidth = abs(tran_flow_stress_agg[case].clip(upper=0).min()) + # ## GWh per day + # wscale = 2.5e3 + # forwardwidth = gwh_forward / numdays + # reversewidth = gwh_reverse / numdays + else: + ## Average when it's flowing + forwardwidth = ( + tran_flow_stress_agg[case].clip(lower=0).sum() + / tran_flow_stress_agg[case].clip(lower=0).astype(bool).sum() + ) + reversewidth = ( + tran_flow_stress_agg[case].clip(upper=0).abs().sum() + / tran_flow_stress_agg[case].clip(upper=0).abs().astype(bool).sum() + ) + + interfaces = tran_flow_stress_agg[case].columns + numdays = len(tran_flow_stress_agg[case]) * int(sw.GSw_HourlyChunkLengthStress) // 24 + + ### Head/tail length: + gwh_forward = tran_flow_stress_agg[case].clip(lower=0).sum() + gwh_reverse = abs(tran_flow_stress_agg[case].clip(upper=0).sum()) + + reversefrac = gwh_reverse / (gwh_reverse + gwh_forward) + forwardfrac = gwh_forward / (gwh_reverse + gwh_forward) + + ### Plot it + for interface in interfaces: + r, rr = interface.split('|') + startx, starty = dfmap[ralevel].loc[r, ['x', 'y']] + endx, endy = dfmap[ralevel].loc[rr, ['x', 'y']] + + plots.plot_segmented_arrow( + ax[coords[case]], + reversefrac=reversefrac[interface], + forwardfrac=forwardfrac[interface], + reversewidth=reversewidth[interface]*wscale, + forwardwidth=forwardwidth[interface]*wscale, + startx=startx, starty=starty, endx=endx, endy=endy, + forwardcolor=colors[case], reversecolor=whiteout[colors[case]], + alpha=0.8, headwidthfrac=1.5, + ) + ### Scale + if scale: + (startx, starty, endx, endy) = (-2.0e6, -1.2e6, -1.5e6, -1.2e6) + yspan = ax[coords[case]].get_ylim() + yspan = yspan[1] - yspan[0] + plots.plot_segmented_arrow( + ax[coords[case]], + reversefrac=0, forwardfrac=1, + reversewidth=0, forwardwidth=scale*wscale, + startx=startx, starty=starty, endx=endx, endy=endy, + forwardcolor=colors[case], reversecolor=whiteout[colors[case]], + alpha=0.8, headwidthfrac=1.5, + ) + ax[coords[case]].annotate( + f"{scale} GW\n{label}", ((startx+endx)/2, starty-(scale/2*wscale)-yspan*0.02), + ha='center', va='top', fontsize=14, + ) + + ### Save it + title = f'{ralevel} {label} RA flows' + slide = add_to_pptx(title) + if interactive: + plt.show() + + +#%%### All-in-one maps +for case in cases: try: - aggreg2anchorreg = pd.read_csv( - os.path.join(casebase,'inputs_case','aggreg2anchorreg.csv'), - index_col='aggreg' - ).squeeze(1) + slide = 
add_to_pptx( + case, + file=os.path.join( + cases[case],'outputs','maps', + f'map_gencap_transcap-{lastyear}.png'), + width=None, height=6.88, + ) except FileNotFoundError: - aggreg2anchorreg = dict(zip(endpoints.index, endpoints.index)) - endpoints['x'] = endpoints.centroid.x - endpoints['y'] = endpoints.centroid.y - dfba['labelx'] = dfba.geometry.centroid.x - dfba['labely'] = dfba.geometry.centroid.y - dfba['x'] = dfba.index.map(aggreg2anchorreg).map(endpoints.x) - dfba['y'] = dfba.index.map(aggreg2anchorreg).map(endpoints.y) - dfba.st = dfba.st.str.upper() - - ### Aggregate to states - dfstates = dfba.dissolve('st') + print(f'No all-in-one map for {os.path.basename(cases[case])}') - ### Plot it + +#%%### Generation capacity maps +### Shared data +base = cases[list(cases.keys())[0]] +val_r = dictin_cap_r[basecase].r.unique() +dfba = reedsplots.get_zonemap(base).loc[val_r] +dfstates = dfba.dissolve('st') +if len(cases) == 2: for i_plot in i_plots: plt.close() f,ax=plt.subplots( - 1,3,sharex=True,sharey=True,figsize=(14,8), + 1, 3, sharex=True, sharey=True, figsize=(14,8), gridspec_kw={'wspace':-0.05, 'hspace':0.05}, dpi=150, ) _,_,dfplot = reedsplots.plotdiffmaps( - val=mapdiff, i_plot=i_plot, year=t, casebase=casebase, casecomp=casecomp, + val=mapdiff, i_plot=i_plot, year=lastyear, casebase=casebase, casecomp=casecomp, reeds_path=reeds_path, plot='base', f=f, ax=ax[0], dfba=dfba, dfstates=dfstates, - cmap=plt.cm.gist_earth_r, + cmap=cmocean.cm.rain, ) ax[0].annotate( - casebase_name[titleshorten:], + casebase_name, (0.1,1), xycoords='axes fraction', fontsize=10) _,_,dfplot = reedsplots.plotdiffmaps( - val=mapdiff, i_plot=i_plot, year=t, casebase=casebase, casecomp=casecomp, + val=mapdiff, i_plot=i_plot, year=lastyear, casebase=casebase, casecomp=casecomp, reeds_path=reeds_path, plot='comp', f=f, ax=ax[1], dfba=dfba, dfstates=dfstates, - cmap=plt.cm.gist_earth_r, + cmap=cmocean.cm.rain, ) ax[1].annotate( - casecomp_name[titleshorten:], + casecomp_name, (0.1,1), xycoords='axes fraction', fontsize=10) _,_,dfplot = reedsplots.plotdiffmaps( - val=mapdiff, i_plot=i_plot, year=t, casebase=casebase, casecomp=casecomp, + val=mapdiff, i_plot=i_plot, year=lastyear, casebase=casebase, casecomp=casecomp, reeds_path=reeds_path, plot='absdiff', f=f, ax=ax[2], dfba=dfba, dfstates=dfstates, - cmap=plt.cm.bwr, + cmap=plt.cm.RdBu_r, ) # print(dfplot.CAP_diff.min(), dfplot.CAP_diff.max()) ax[2].annotate( '{}\n– {}'.format( - casecomp_name[titleshorten:], - casebase_name[titleshorten:]), + casecomp_name, + casebase_name), (0.1,1), xycoords='axes fraction', fontsize=10) - add_to_pptx(f'Capacity ({t})') + add_to_pptx(f'{i_plot} capacity {lastyear} [GW]') + if interactive: + plt.show() +else: + #### Absolute maps + if (nrows == 1) or (ncols == 1): + legendcoords = max(nrows, ncols) - 1 + elif (nrows-1, ncols-1) in coords.values(): + legendcoords = (nrows-1, ncols-1) + else: + legendcoords = (nrows-2, ncols-1) + + ### Set up plot + for tech in maptechs: + ### Get limits + vmin = 0. + vmax = float(pd.concat({ + case: dictin_cap_r[case].loc[ + (dictin_cap_r[case].i==tech) + & (dictin_cap_r[case].t.astype(int)==lastyear) + ].groupby('r').MW.sum() + for case in cases + }).max()) / 1e3 + if np.isnan(vmax): + vmax = 0. 
+ if not vmax: + print(f'{tech} has zero capacity in {lastyear}, so skipping maps') + continue + ### Set up plot + plt.close() + f,ax = plt.subplots( + nrows, ncols, figsize=(13.33, 6.88), + gridspec_kw={'wspace':0.0,'hspace':-0.1}, + ) + ### Plot it + for case in cases: + dfval = dictin_cap_r[case].loc[ + (dictin_cap_r[case].i==tech) + & (dictin_cap_r[case].t.astype(int)==lastyear) + ].groupby('r').MW.sum() + dfplot = dfba.copy() + dfplot['GW'] = (dfval / 1e3).fillna(0) + + ax[coords[case]].set_title(case) + dfba.plot( + ax=ax[coords[case]], + facecolor='none', edgecolor='k', lw=0.1, zorder=10000) + dfstates.plot( + ax=ax[coords[case]], + facecolor='none', edgecolor='k', lw=0.2, zorder=10001) + dfplot.plot( + ax=ax[coords[case]], column='GW', cmap=cmap, vmin=vmin, vmax=vmax, + legend=False, + ) + ## Legend + if coords[case] == legendcoords: + plots.addcolorbarhist( + f=f, ax0=ax[coords[case]], data=dfplot.GW.values, + title=f'{tech} {lastyear}\ncapacity [GW]', cmap=cmap, vmin=vmin, vmax=vmax, + orientation='horizontal', labelpad=2.25, histratio=0., + cbarwidth=0.05, cbarheight=0.85, + cbarbottom=-0.05, cbarhoffset=0., + ) + + for row in range(nrows): + for col in range(ncols): + if nrows == 1: + ax[col].axis('off') + elif ncols == 1: + ax[row].axis('off') + else: + ax[row,col].axis('off') + ### Save it + slide = add_to_pptx(f'{tech} capacity {lastyear} [GW]') if interactive: plt.show() -except Exception as err: - print(err) + + #### Difference maps + ### Set up plot + for tech in maptechs: + ### Get limits + dfval = pd.concat({ + case: dictin_cap_r[case].loc[ + (dictin_cap_r[case].i==tech) + & (dictin_cap_r[case].t.astype(int)==lastyear) + ].groupby('r').MW.sum() + for case in cases + }, axis=1).fillna(0) / 1e3 + dfdiff = dfval.subtract(dfval[basecase], axis=0) + ### Get colorbar limits + absmax = dfval.stack().max() + diffmax = dfdiff.unstack().abs().max() + + if np.isnan(absmax): + absmax = 0. 
+ if not absmax: + print(f'{tech} has zero capacity in {lastyear}, so skipping maps') + continue + ### Set up plot + plt.close() + f,ax = plt.subplots( + nrows, ncols, figsize=(13.33, 6.88), + gridspec_kw={'wspace':0.0,'hspace':-0.1}, + ) + ### Plot it + for case in cases: + dfplot = dfba.copy() + dfplot['GW'] = dfval[case] if case == basecase else dfdiff[case] + + ax[coords[case]].set_title(case) + dfba.plot( + ax=ax[coords[case]], + facecolor='none', edgecolor='k', lw=0.1, zorder=10000) + dfstates.plot( + ax=ax[coords[case]], + facecolor='none', edgecolor='k', lw=0.2, zorder=10001) + dfplot.plot( + ax=ax[coords[case]], column='GW', + cmap=(cmap if case == basecase else cmap_diff), + vmin=(0 if case == basecase else -diffmax), + vmax=(absmax if case == basecase else diffmax), + legend=False, + ) + ## Difference legend + if coords[case] == legendcoords: + plots.addcolorbarhist( + f=f, ax0=ax[coords[case]], data=dfplot.GW.values, + title=f'{tech} {lastyear}\ncapacity, difference\nfrom {basecase} [GW]', + cmap=(cmap if case == basecase else cmap_diff), + vmin=(0 if case == basecase else -diffmax), + vmax=(absmax if case == basecase else diffmax), + orientation='horizontal', labelpad=2.25, histratio=0., + cbarwidth=0.05, cbarheight=0.85, + cbarbottom=-0.05, cbarhoffset=0., + ) + ## Absolute legend + plots.addcolorbarhist( + f=f, ax0=ax[coords[basecase]], data=dfval[basecase].values, + title=f'{tech} {lastyear}\ncapacity [GW]', + cmap=cmap, vmin=0, vmax=absmax, + orientation='horizontal', labelpad=2.25, histratio=0., + cbarwidth=0.05, cbarheight=0.85, + cbarbottom=-0.05, cbarhoffset=0., + ) + + for row in range(nrows): + for col in range(ncols): + if nrows == 1: + ax[col].axis('off') + elif ncols == 1: + ax[row].axis('off') + else: + ax[row,col].axis('off') + ### Save it + slide = add_to_pptx(f'Difference: {tech} capacity {lastyear} [GW]') + if interactive: + plt.show() + #%% Save the powerpoint file -savename = os.path.join( - outpath, f"diff-{casebase_name}.pptx" -) prs.save(savename) -print(savename) +print(f'\ncompare_casegroup.py results saved to:\n{savename}') ### Open it if sys.platform == 'darwin': - sp.run(f'open {savename}', shell=True) + sp.run(f"open '{savename}'", shell=True) elif platform.system() == 'Windows': sp.run(f'"{savename}"', shell=True) diff --git a/postprocessing/example.csv b/postprocessing/example.csv new file mode 100644 index 0000000..4963977 --- /dev/null +++ b/postprocessing/example.csv @@ -0,0 +1,7 @@ +casepath,casename,color +# /Volumes/ReEDS/Users/pbrown/ReEDSruns/20240115_transop/20240307/v20240307_transopK0_Restricted,Restricted, +# /Volumes/ReEDS/Users/pbrown/ReEDSruns/20240115_transop/20240307/v20240307_transopK0_Permit_CoordLite38,Regional coord., +# /Volumes/ReEDS/Users/pbrown/ReEDSruns/20240115_transop/20240307/v20240307_transopK0_Permit_CoordFull,Interregional coord., +# /Volumes/ReEDS/Users/pbrown/ReEDSruns/20240115_transop/20240307/v20240307_transopK0_Permit_CoordFull_ITC,Interregional coord. 
+ ITC,
+/Volumes/ReEDS/Users/pbrown/ReEDSruns/20240112_stresspaper/20240313/v20240313_stresspaperE0_CC_DemHi_90by2035__core,Capacity credit,C4
+/Volumes/ReEDS/Users/pbrown/ReEDSruns/20240112_stresspaper/20240313/v20240313_stresspaperE0_SP_DemHi_90by2035__core,Stress periods,plt.cm.tab10(1)
diff --git a/postprocessing/land_use/README.md b/postprocessing/land_use/README.md
new file mode 100644
index 0000000..dcd956e
--- /dev/null
+++ b/postprocessing/land_use/README.md
@@ -0,0 +1,37 @@
+# Overview
+
+The `land_use_analysis.py` script uses the outputs from `reeds_to_rev.py` to
+estimate total land use from a ReEDS scenario for a set of specified reV characterizations.
+
+# How to run
+
+The land-use analysis processing can be run as part of a ReEDS run by setting `land_use_analysis=1` (`reeds_to_rev` must also be enabled). By default the script runs for all supported technologies (upv and wind-ons).
+
+Alternatively, the script can be run standalone for a specified ReEDS run by passing the folder of the run: `python postprocessing/land_use/land_use_analysis.py /kfs3/scratch/bsergi/ReEDS-2.0/runs/[run folder]`. See the arguments accepted by the script for additional options when running in standalone mode.
+
+When running, users specify which land characterizations from the reV supply curve they want to process in `postprocessing/land_use/inputs/process_categories.json`. Each entry has a format like the following:
+
+```
+"nlcd": {"colname": "nlcd_2019_90x90", "newcolname":"nlcd_value", "mapping": "nlcd_combined_categories"}
+```
+
+In this example, "nlcd" is a characterization name provided by the user reflecting the column of interest. It is also used for formatting the name of the output file (see details below).
+
+The dictionary that follows the characterization name provides information on the column to be processed.
+- The "colname" entry indicates the reV supply curve column to process. Typically this is a single string but can be a list if the shorthand name is specified as "species".
+- An optional "mapping" entry specifies a file in `postprocessing/land_use/inputs` to use for mapping the reV json values to new categories.
+- An optional "newcolname" entry specifies which column in the mapping file to map to (required if "mapping" is present).
+
+Users should configure the `process_categories.json` file as needed before running.
+
+# Outputs
+
+The script produces files named in the format `land_use_[tech]_[characterization].csv.gz` in the outputs folder of the ReEDS run for each characterization specified in the json configuration file. For characterizations defined by json files, the outputs include one row of data per supply curve point / land cover type / ReEDS build-out year. To reduce file size, this only includes data for the first simulation year (usually 2023 or 2024) and the last year. The columns include information on the area and built capacity for the supply curve point as a whole and for each land cover category within that supply curve point.
+
+For characterizations defined by the number of cells of coverage (typically species habitat and range), all columns are combined into one "species" land use file in long format by characterization, with output including the total supply curve area and buildout as well as the area of the species habitat/range in that supply curve point.
+
+# Notes
+
+The `land_use_analysis.py` script requires outputs from `reeds_to_rev.py` as well as the ability to read in the reV supply curves for the corresponding run.
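As a rough illustration of how the `process_categories.json` entries described under "How to run" are consumed, the following minimal sketch loads the configuration and loops over its entries. It mirrors the `loadCategoriesToProcess` helper added to `land_use_analysis.py` below, but the `reedspath` handling and the printed summary are illustrative assumptions rather than the script's exact behavior.

```
import json
import os
from collections import OrderedDict

# Assumed to be run from the ReEDS repository root; adjust as needed (illustrative only).
reedspath = os.getcwd()
configpath = os.path.join(
    reedspath, "postprocessing", "land_use", "inputs", "process_categories.json")

# Preserve the order of the characterizations as written in the json file
with open(configpath, "r") as f:
    categories = json.load(f, object_pairs_hook=OrderedDict)

for name, entry in categories.items():
    colname = entry["colname"]            # reV supply curve column(s): string or list
    mapping = entry.get("mapping")        # optional csv in postprocessing/land_use/inputs
    newcolname = entry.get("newcolname")  # column of the mapping file to map to
    print(f"{name}: colname={colname}, mapping={mapping}, newcolname={newcolname}")
```

Entries without a "mapping" key (such as the "species" entry) are processed directly from the listed supply curve columns.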
+Path information for the supply curves is taken from the run's `rev_paths.csv` file.
+
+Area estimates are derived assuming a reV cell resolution of 90x90 m. For wind, the area estimates include indirect land use (i.e., land needed between turbines for spacing).
\ No newline at end of file
diff --git a/postprocessing/land_use/inputs/federal_land_lookup.csv b/postprocessing/land_use/inputs/federal_land_categories.csv
similarity index 100%
rename from postprocessing/land_use/inputs/federal_land_lookup.csv
rename to postprocessing/land_use/inputs/federal_land_categories.csv
diff --git a/postprocessing/land_use/inputs/nlcd_classifications.csv b/postprocessing/land_use/inputs/nlcd_categories.csv
similarity index 100%
rename from postprocessing/land_use/inputs/nlcd_classifications.csv
rename to postprocessing/land_use/inputs/nlcd_categories.csv
diff --git a/postprocessing/land_use/inputs/nlcd_combined_categories.csv b/postprocessing/land_use/inputs/nlcd_combined_categories.csv
new file mode 100644
index 0000000..ef77260
--- /dev/null
+++ b/postprocessing/land_use/inputs/nlcd_combined_categories.csv
@@ -0,0 +1,21 @@
+nlcd_value,land_type
+11,water
+12,perennial ice/snow
+21,developed
+22,developed
+23,developed
+24,developed
+31,barren
+41,forest
+42,forest
+43,forest
+51,shrubland
+52,shrubland
+71,grassland
+72,wetland
+73,lichens
+74,moss
+81,hay/pasture
+82,cropland
+90,wetland
+95,wetland
\ No newline at end of file
diff --git a/postprocessing/land_use/inputs/process_categories.json b/postprocessing/land_use/inputs/process_categories.json
new file mode 100644
index 0000000..a0eaf4d
--- /dev/null
+++ b/postprocessing/land_use/inputs/process_categories.json
@@ -0,0 +1,5 @@
+{
+    "fed_land": {"colname": "fed_land_owner", "newcolname": "fed_land_owner", "mapping": "federal_land_categories"},
+    "nlcd": {"colname": "nlcd_2019_90x90", "newcolname":"nlcd_value", "mapping": "nlcd_combined_categories"},
+    "species": {"colname": ["habitat", "range", "ungulate_use_areas"]}
+}
\ No newline at end of file
diff --git a/postprocessing/land_use/inputs/usgs_categories.csv b/postprocessing/land_use/inputs/usgs_categories.csv
new file mode 100755
index 0000000..8df35ad
--- /dev/null
+++ b/postprocessing/land_use/inputs/usgs_categories.csv
@@ -0,0 +1,19 @@
+usgs_code,usgs_class
+1,water
+2,developed
+3,mechanically disturbed national forests
+4,mechanically disturbed other public lands
+5,mechanically disturbed private
+6,mining
+7,barren
+8,deciduous forest
+9,evergreen forest
+10,mixed forest
+11,grassland
+12,shrubland
+13,cropland
+14,hay/pasture
+15,herbaceous wetland
+16,woody wetland
+17,perennial ice/snow
+9999,unclassified
\ No newline at end of file
diff --git a/postprocessing/land_use/inputs/usgs_combined_categories.csv b/postprocessing/land_use/inputs/usgs_combined_categories.csv
new file mode 100644
index 0000000..91e7ab4
--- /dev/null
+++ b/postprocessing/land_use/inputs/usgs_combined_categories.csv
@@ -0,0 +1,18 @@
+usgs_code,land_type
+1,water
+2,developed
+3,forest
+4,forest
+5,forest
+6,barren
+7,barren
+8,forest
+9,forest
+10,forest
+11,grassland
+12,shrubland
+13,cropland
+14,hay/pasture
+15,wetland
+16,wetland
+17,perennial ice/snow
\ No newline at end of file
diff --git a/postprocessing/land_use/land_use_analysis.py b/postprocessing/land_use/land_use_analysis.py
index 65e1f1e..8e6c026 100644
--- a/postprocessing/land_use/land_use_analysis.py
+++ b/postprocessing/land_use/land_use_analysis.py
@@ -1,56 +1,59 @@
 '''
-This script takes uses the outputs from reeds_to_rev.py to evaluate
-land-use impacts for a ReEDS scenario. It is run automatically during a ReEDS -run when the `reeds_to_rev` switch is enabled. +This script uses the outputs from reeds_to_rev.py to estimate total land use from a ReEDS scenario +for a set of specified reV characterizations. It is run automatically during a ReEDS +run with `land_use_analysis=1` (`reeds_to_rev` must also be enabled). -The processing is currently only configured to work for UPV, but will -hopefully be expanded to work for other technologies in the future. +The processing is currently only configured to work for upv and wind-ons. -Note that land-use fields must be processed as part of the rev run to be analyzed here. +For more details see the README in the postprocessing/land_use folder. -Passed arguments: - - path to ReEDS scenario - -Other input data: - - rev supply curve aggreation file that contains land-use charactistics - - mapping files for relevant land use categories (see /postprocessing/land_use/inputs) - -Outputs: - - land_use_[category].csv: saved to the output folder of the ReEDS runs. +Author: bsergi ''' +import argparse +import json import os -import sys import pandas as pd -import argparse +import site +import sys import time -from input_processing.ticker import makelog +import traceback +from collections import OrderedDict +from glob import glob ####################### # Helper functions ####################### +# loads information on supply curve columns to process from input json +def loadCategoriesToProcess(reedspath, jsonfilename="process_categories"): + configpath = os.path.join(reedspath, "postprocessing", "land_use", "inputs", f"{jsonfilename}.json") + + with open(configpath, "r") as f: + json_data = json.load(f, object_pairs_hook=OrderedDict) + return json_data + # calculates total land area required -def totalLand(scen_path, upv_land_use, total_area): +def totalLand(scen_path, tech_land_use, total_area, tech, capacity_col): print("...processing total land use.") # check whether each supply curve point is full (within tolerance of 0.1 MW) - upv_land_use['sc_full_bool'] = (abs(upv_land_use['capacity'] - upv_land_use['built_capacity']) < 0.1) - upv_land_use['fraction_built'] = upv_land_use['built_capacity'] / upv_land_use['capacity'] - upv_land_use['built_area_sq_km'] = upv_land_use['area_sq_km'] * upv_land_use['fraction_built'] + tech_land_use['sc_full_bool'] = (abs(tech_land_use[capacity_col] - tech_land_use['built_capacity']) < 0.1) + tech_land_use['fraction_built'] = tech_land_use['built_capacity'] / tech_land_use[capacity_col] + tech_land_use['built_area_sq_km'] = tech_land_use['area_sq_km'] * tech_land_use['fraction_built'] # calculate total area - upv_land_use['built_capacity_MW'] = upv_land_use['built_capacity'] - upv_area = upv_land_use.groupby(['year'])[["built_capacity_MW", "built_area_sq_km"]].sum() + tech_land_use['built_capacity_MW'] = tech_land_use['built_capacity'] + tech_area = tech_land_use.groupby(['year'])[["built_capacity_MW", "built_area_sq_km"]].sum() # current total is area available for development, not total non-excluded area # does not currently use multiple years of land-use data - upv_area['avail_area_sq_km'] = total_area + tech_area['avail_area_sq_km'] = total_area # save output - upv_area.to_csv(os.path.join(scen_path, "outputs", "land_use_upv.csv"), float_format='%.2f') + tech_area.to_csv(os.path.join(scen_path, "outputs", f"land_use_{tech}.csv"), float_format='%.6f') # calculates capacity deployed by land use category # assumes for now that capacity is evenly distributed 
by land-use type, @@ -59,87 +62,183 @@ def calculateCapacityByUse(df, tag): # NAs indicate no cells or builds in that supply curve point # df = df.fillna(0) - df['built_capacity'] = df['built_capacity'].fillna(0) + df['built_capacity_MW'] = df['built_capacity_MW'].fillna(0) df['fraction_built'] = df['fraction_built'].fillna(0) - frac_column = "fraction_" + tag - # get total area by class - df[frac_column] = df['cells'] / df['n_gids'] - df[tag + '_area_sq_km'] = df[frac_column] * df['area_sq_km'] + frac_column = "fraction_sc_area" + # this is the fraction of the land use category to the total supply curve + df[frac_column] = df['area_sq_km_land_use'] / df['area_sq_km'] # amount of capacity built in each land class is (total capacity built in sc_point) x (share of land use / total sc point) - df[tag + '_built_capacity'] = df[frac_column] * df['built_capacity'] + df[tag + '_built_capacity_MW'] = df[frac_column] * df['built_capacity_MW'] + + # area used is a function of the fraction of total sc curve area used + df[tag + '_area_built_sq_km'] = df[frac_column] * df['area_sq_km'] * df['fraction_built'] - # area use is a function of the fraction of total sc curve area used - df[tag + '_area_sq_km_built'] = df[tag + '_area_sq_km'] * df['fraction_built'] + # drop fraction column + df.drop(frac_column, axis=1, inplace=True) return df # helper function to parse any columns with JSON data -def parseJSON(df, col, var, mapping=None): - out = df[col].apply(pd.read_json, typ="series") +def parseJSON(df, col, var, reedspath, mapping=None, + keepcols=["sc_point_gid", "latitude", "longitude", "cnty_fips", "area_sq_km"]): + + # select data based on column (can be a regular expression for multiple columns) + jsondata = df.filter(regex=(col)) + + # check to make sure there is only 1 column + if jsondata.shape[1] == 0: + print("Warning: no columns matched. Check regular expression " + f"used to select supply curve columns: {col}. " + ) + print(f"List of available columns: {df.columns}") + raise Exception("No matching columns") + + elif jsondata.shape[1] > 1: + print(f"Warning: identified {jsondata.columns} as columns matching the " + f"supplied regular expression {col}. " + "Will select the first column to proceed; if that isn't correct " + "review the column selection expression." + ) + jsondata = jsondata[jsondata.columns[0]] + print(f"...column to process: {jsondata.name}") + + st = time.time() + elements = ','.join(jsondata.tolist()) + col_data = json.loads(f"[{elements}]") + out = pd.DataFrame(col_data) + et = time.time() + print("...elapsed time for json.loads function: %0.2f seconds" % (et - st)) + + # drop any columns named '0'; these are actually areas outside the U.S. that get + # captured as excluded for sc_points on the border with Canada or Mexico. 
+ out = out.drop([0, '0'] , axis=1, errors='ignore') # NAs are zeros out.fillna(0, inplace=True) - # get sum of cells classified by land-use - out['n_gids_land_use'] = out.sum(axis=1) + + # convert to cell count to area + # assumes each cell is 90x90 m and converts to sq km + cell_area = 90*90 / 1E6 + out = out * cell_area # line up sc_point_gid, year, and other info from original mapping data - out['sc_point_gid'] = df['sc_point_gid'] - out['state'] = df['state'] - out['area_sq_km'] = df['area_sq_km'] - out['n_gids'] = df['n_gids'] - - # land-use mapping files do not vary by year, but this capability may be added eventually - # out['year'] = df['year'] - - # compare total cells in sc point to those mapped by the category - # use '9999' to indicate capacity built on cells that aren't classified - out[9999] = out['n_gids'] - out['n_gids_land_use'] - + dropcols = [] + for kc in keepcols: + if kc in df.columns: + out[kc] = df[kc] + else: + print(f"...Warning: missing {kc} in the supply curve data so will skip in output.") + dropcols.append(kc) + keepcols = [kc for kc in keepcols if kc not in dropcols] + # melt mapping data to long format - out = out.melt(id_vars=["sc_point_gid", "state", "area_sq_km", "n_gids", "n_gids_land_use"], var_name=var, value_name="cells") + out = out.melt(id_vars=keepcols, var_name=var, value_name="area_sq_km_land_use") + + # convert codes to integers + out[var] = out[var].astype('float').astype('int') + + # identify codes in the data + codes_to_map = list(out[var].unique()) + codes_to_map.sort() + print(f"Land use codes identified: {codes_to_map}") # add mapping details (files found in ReEDS postprocessing module) if mapping is not None: try: - df_map = pd.read_csv(os.path.join("postprocessing", "land_use", "inputs", mapping + ".csv")) + df_map = pd.read_csv(os.path.join(reedspath, "postprocessing", "land_use", "inputs", mapping + ".csv")) + except: + print("Error: could not read specified mapping file. Check file path:" + f"{os.path.join(reedspath, 'postprocessing', 'land_use', 'inputs', mapping + '.csv')}" + ) + try: + df_map_subset = df_map.loc[df_map[var].isin(codes_to_map)] + print("Applying the following mapping: ") + print(df_map_subset) + out = out.merge(df_map.drop("color", axis=1, errors="ignore"), how="left", on=var) - out.drop([var,'n_gids_land_use'], axis=1, inplace=True) + + # check for missing values + missing = [v for v in out[var].unique() if v not in df_map[var].unique()] + if len(missing) > 0: + print(f"Warning: missing the following {var} values from mapping file: {missing}\n" + "Will be assigned as 'missing'." + ) + new_cols = df_map.drop([var, "color"], axis=1, errors="ignore").columns + out.update(out[new_cols].fillna('missing')) except: - print("Error reading specified mapping file.") + print(f"Error: merge with mapping file failed. Check column value in file for {var}:") + return out # this function processes land-use categories defined by JSONs -def processLandUseJSON(df_name, df_vals, scen_path, land_use_map, upv_land_use): +def processLandUseJSON(df_name, tech, df_vals, scen_path, reedspath, rev_sc, tech_land_use=None, area_only=False): - print("...processing %s classification." % df_name, end=" ") + print("...processing %s classification." 
% df_name) - # get nlcd land classifications - land_class = parseJSON(land_use_map, df_vals[0], df_vals[1], mapping=df_vals[2]) - - # expand land-use classification to match years of builds - land_class_merge = expandYears(land_class, upv_land_use) - land_use = land_class_merge.merge(upv_land_use[['year', 'sc_point_gid', 'built_capacity', 'fraction_built']], on=["sc_point_gid", "year"], how="outer") - - # allocate capacity to each land use category - land_use = calculateCapacityByUse(land_use, df_name) - - # rename cells column - var_name = df_name + "_n_gids" - land_use.rename(columns={"cells": var_name}, inplace=True) + # get land classifications + land_class = parseJSON(rev_sc, df_vals["colname"], df_vals["newcolname"], reedspath, mapping=df_vals["mapping"]) - # drop rows where there the sc doesn't have that classification to reduce file size - # keep rows with no built capacity as this helps with calculating total use later - land_use = land_use[land_use[var_name] > 0] + # if only running to get area (no capacity buildouts), save results and end here + if area_only: + land_class = land_class.assign(cnty_fips='p'+land_class.cnty_fips.astype(str).map('{:>05}'.format)) - # save outputs - land_use.to_csv(os.path.join(scen_path, "outputs", "land_use_%s.csv.gz" % df_name), float_format='%.2f', index=False) + print(f"Writing outputs to {os.path.join(os.path.dirname(scen_path), f'area_{df_name}.csv.gz')}") + land_class.to_csv(os.path.join( + os.path.dirname(scen_path), f"area_{df_name}.csv.gz" + ), float_format='%.6f', index=False) + + else: + # select data based on column (can be a regular expression for multiple columns) + # expand landjsondata classification to match + + # build years to focus on for results + land_class_merge = expandYears(land_class, tech_land_use) + tech_land_use_merge = tech_land_use.loc[tech_land_use.year.isin(land_class_merge.year.unique())] + + # merge buildout with land use categories + # use outer join to include available land from areas with no capacity + land_use = land_class_merge.merge(tech_land_use_merge[['year', 'sc_point_gid', 'built_capacity_MW', 'fraction_built']], on=["sc_point_gid", "year"], how="outer") + + # allocate capacity to each land use category + land_use = calculateCapacityByUse(land_use, df_name) + + # rename some columns + land_use.rename(columns={'area_sq_km_land_use': f'{df_name}_area_avail_sq_km', + 'area_sq_km': 'sc_area_avail_sq_km', + 'built_capacity_MW': 'sc_built_capacity_MW', + 'fraction_built': 'sc_fraction_built'}, inplace=True) + + # preserve leading zero in fips code + land_use = land_use.assign(cnty_fips='p'+land_use.cnty_fips.astype(str).map('{:>05}'.format)) + + # reorder columns + allcols = land_use.columns + sccols = [col for col in allcols if 'sc_' in col] + ['latitude', 'longitude', 'cnty_fips', 'year'] + landcols = [col for col in allcols if col not in sccols] + land_use = land_use[sccols + landcols] + + # save outputs + print(f"Writing outputs to {os.path.join(scen_path, 'outputs', f'land_use_{tech}_{df_name}.csv.gz')}") + land_use.to_csv(os.path.join( + scen_path, "outputs", f"land_use_{tech}_{df_name}.csv.gz" + ), float_format='%.6f', index=False) # expand land-use mapping for all ReEDS years -def expandYears(land_mapping, reeds_results): +def expandYears(land_mapping, reeds_results, yearsub='firstlast'): df_out = pd.DataFrame() - years = reeds_results['year'].unique().tolist() + years_all = reeds_results['year'].unique().tolist() + if yearsub == 'firstlast': + firstyear = max(y for y in [years_all[0], 2020, 2021, 
2022, 2023, 2024] if y in years_all) + #firstyear = 2020 + years = [firstyear, years_all[-1]] + elif yearsub == 'all': + years = years_all + else: + print("Year subset not recognized; defaulting to the last year.") + years = [years_all[-1]] + for y in years: df = land_mapping.copy() df['year'] = y @@ -147,109 +246,208 @@ def expandYears(land_mapping, reeds_results): return df_out # process use of land identified as species range/habitat -def getSpeciesImpact(scen_path, upv_land_use): +# unlike other land use categories (which have the number of cells by category stored in a json file), +# these columns include just one number that reflects the number of cells in the category +def getSpeciesImpact(tech, scen_path, rev_sc, tech_land_use, species_col_list): print("...processing species habitat and range information.") + species_cols = [] + for species_col in species_col_list['colname']: + species_cols.extend([col for col in rev_sc.columns if species_col in col ]) + + if len(species_cols) == 0: + print("No species columns found that contain specified substrings; " + "check colname values in process_categories.json against available supply curve columns") + print(f"List of available columns: {rev_sc.columns}") + else: + print(f"Found the following species columns: {species_cols}") - species_cols = [col for col in upv_land_use.columns if 'range' in col or 'habitat' in col] - id_cols = ['year', 'region', 'sc_point_gid', 'sc_full_bool', 'n_gids', 'fraction_built', 'area_sq_km', 'built_area_sq_km'] - species_land_use = upv_land_use[id_cols+ species_cols] + id_cols = ['year','region','sc_point_gid','sc_full_bool','fraction_built','area_sq_km','built_area_sq_km','built_capacity_MW'] + species_land_use = tech_land_use[id_cols] + + # using left join here for now but may want to revise to capture species habitat/range outside of built areas + species_land_use = pd.merge(species_land_use, rev_sc[['sc_point_gid'] + species_cols], on='sc_point_gid', how='left') # melt to long - species_land_use = pd.melt(species_land_use, id_vars=id_cols, value_vars=species_cols, var_name="species", value_name="species_n_gids") + species_land_use = pd.melt(species_land_use, id_vars=id_cols, + value_vars=species_cols, var_name="species_var", value_name="species_var_cells") # categorize by species and impact type - species_land_use['species_extent'] = species_land_use['species'].str.replace(".*_", "", regex=True) - species_land_use['species'] = species_land_use['species'].str.replace("_.*", "", regex=True) - - # calculate density - species_land_use['species_density'] = species_land_use['species_n_gids'] / species_land_use['n_gids'] + # only works for certain formats for reV so needs some modifications before folding into main workflow + # species_land_use['species_extent'] = species_land_use['species'].str.replace(".*_", "", regex=True) + # species_land_use['species'] = species_land_use['species'].str.replace("_.*", "", regex=True) + + # calculate area for species variable + # assumes each cell is 90x90 m and converts to sq km + cell_area = 90*90 / 1E6 + species_land_use['species_area_sq_km'] = species_land_use['species_var_cells'] * cell_area + + # calculate density as a fraction of developable area (consider converting calculation to total area in sc point) + #species_land_use['species_density'] = species_land_use['species_area'] / species_land_use['area_sq_km'] + + species_land_use.rename(columns={ + 'area_sq_km': 'sc_area_avail_sq_km', + 'built_area_sq_km': 'sc_built_area_sq_km', + 'built_capacity_MW': 'sc_built_capacity_MW' + },
inplace=True) + + species_land_use.drop("species_var_cells", axis=1, inplace=True) # save outputs - species_land_use.to_csv(os.path.join(scen_path, "outputs", "land_use_species.csv.gz"), float_format='%.2f', index=False) + species_land_use.to_csv(os.path.join(scen_path, "outputs", f"land_use_{tech}_species.csv.gz"), float_format='%.6f', index=False) # primary process function called by main loop for each tech -def getLandUse(scenario, scen_path, rev_paths, tech): +def getLandUse(scen_path, jsonfile, rev_paths, reedspath, tech, capacity_col="capacity_mw_ac"): + scenario = os.path.basename(scen_path) print("Getting %s land-use data for %s" % (tech, scenario)) # select rev case for tech being processed - rev = rev_paths.loc[rev_paths.tech == tech].squeeze() - - # for older UPV rev runs land-use data had to be added in a separate file. - # newer runs using the 2022 UPV sc will now have this directly in the "aggregation" sc file - if "2021_Update" in rev.rev_path: - map_file = "%s_agg_land_use.csv.gz" % rev.rev_case - else: - map_file = "%s_supply-curve-aggregation.csv" % rev.rev_case + rev_paths = rev_paths.loc[rev_paths.tech == tech].squeeze() + # first attempt for supply curve should be the specified rev sc file for the run + sc_file = rev_paths.sc_file try: - land_use_map = pd.read_csv(os.path.join(rev.rev_path, map_file)) + rev_sc = pd.read_csv(sc_file) except: - sys.exit("Error reading rev mapping file. Check that appropriate file is in the rev folder.") - + print("...Warning: failed to read default sc file. Will attempt to read secondary sc file.") + # for older UPV rev runs land-use data had to be added in a separate file. + # newer runs using the 2022 UPV sc will now have this directly in the "aggregation" sc file + rev_folder = os.path.join(rev_paths.sc_path, "reV", rev_paths.rev_case) + if "2021_Update" in rev_paths.rev_path: + map_file = os.path.join(rev_folder, "%s_agg_land_use.csv.gz" % rev_paths.rev_case) + else: + # using glob here to catch differences between "-" and "_" in the sc file name + sc_matches = glob(os.path.join(rev_folder, "**supply*curve*aggregation**")) + if len(sc_matches) > 1: + print(f"Multiple sc curve aggregation files detected; using {os.path.basename(sc_matches[0])}") + map_file = sc_matches[0] + try: + rev_sc = pd.read_csv(map_file) + except: + sys.exit("Error reading rev mapping file. 
Check that appropriate file is in the rev folder.") # load outputs from reeds_to_rev.py script - builds_upv = pd.read_csv(os.path.join(scen_path, "outputs", "df_sc_out_upv_reduced.csv")) + try: + builds_tech = pd.read_csv(os.path.join(scen_path, "outputs", f"df_sc_out_{tech}_reduced.csv")) + except: + sys.exit(f"Error reading df_sc_out_{tech}_reduced.csv file; check that reeds_to_rev.py ran successfully.") # if using land-use features that change over time then merge on year, otherwise ignore year - upv_land_use = builds_upv.merge(land_use_map, on=['sc_point_gid']) - total_area = land_use_map['area_sq_km'].sum() - - ## Total land area estimates #### - totalLand(scen_path, upv_land_use, total_area) - - ## Land classifications defined by JSON mappings #### - - # dictionary defines land use categories specified with JSON files - # format is short name: (name of column in rev mapping, new column name, mapping file for renaming values if any) - json_data = {"fed_land": ("fed_land_owner", "fed_land_owner", "federal_land_lookup"), # federal land ownership - "nlcd": ("usa_mrlc_nlcd2011", "nlcd_value", "nlcd_classifications")} # national land cover database - - for df_name in json_data: - + tech_land_use = builds_tech[['year','region','sc_point_gid','built_capacity']].merge( + rev_sc[['sc_point_gid', 'area_sq_km', capacity_col]], on=['sc_point_gid'] + ) + + # estimate of total developable area from reV + total_area = rev_sc['area_sq_km'].sum() + # calculate total built area + totalLand(scen_path, tech_land_use, total_area, tech, capacity_col) + + # load supply curve categories to process + json_data = loadCategoriesToProcess(reedspath, jsonfile) + + # iterate over list of categories to process from input json file + for df_name in json_data: st = time.time() - processLandUseJSON(df_name, json_data[df_name], scen_path, land_use_map, upv_land_use) + try: + # species-like reV columns get special treatment + if df_name == "species": + getSpeciesImpact(tech, scen_path, rev_sc, tech_land_use, json_data[df_name]) + # all other columns are assumed to have json information with the number of cells by land category + else: + processLandUseJSON(df_name, tech, json_data[df_name], scen_path, reedspath, rev_sc, tech_land_use) + except Exception as err: + print(f"Error processing {df_name}") + print(err) + print("\n skipping to next item for processing.") et = time.time() print("(elapsed time: %0.2f seconds)" % (et - st)) - ## Species habitat and range #### - getSpeciesImpact(scen_path, upv_land_use) - - ####################### # Main ####################### + +# function to bypass ReEDS results and just summarize area for a supply curve +def summarizeSupplyCurve(scpath, reedspath, jsonfile, techs): + print("\nSummarizing supply curves via 'land_use_analysis.py' script.\n") -if __name__ == '__main__': - - # Argument inputs - parser = argparse.ArgumentParser(description="""This script calculates evaluates land-use implications for - solar buildouts from one or more ReEDS runs. 
- Requires the 'reeds_to_rev.py' to have been run.""") - parser.add_argument("scenario", help="Folder of ReEDS run") - args = parser.parse_args() - - scen = os.path.basename(args.scenario) - scen_path = args.scenario + # read in sc data + rev_sc = pd.read_csv(scpath) - #%% Set up logger - log = makelog(scriptname=__file__, logpath=os.path.join(args.scenario,'gamslog.txt')) + # load supply curve categories to process + json_data = loadCategoriesToProcess(reedspath, jsonfile) + for df_name in json_data: + st = time.time() + for tech in techs: + try: + processLandUseJSON(df_name, tech, json_data[df_name], scpath, reedspath, rev_sc, area_only=True) + except Exception as err: + print(f"Error processing {df_name}") + #print(err) + print(traceback.format_exc()) + print("\n skipping to next item for processing.") + et = time.time() + print("(elapsed time: %0.2f seconds)" % (et - st)) + +def runLandUse(scen_path, reedspath, jsonfile, techs): print("\nRunning 'land_use_analysis.py' script.\n") + + # dictionary of capacity column to use by tech (depends on reV format) + capacity_cols = {'upv': 'capacity_mw_ac', 'wind-ons':'capacity_mw'} try: # get path to relevant rev files and switch settings rev_paths = pd.read_csv( - os.path.join(scen_path, "inputs_case", "rev_paths.csv")) + os.path.join(scen_path, "inputs_case", "rev_paths.csv") + ) + except FileNotFoundError: + sys.exit(f"Could not read {os.path.join(scen_path, 'inputs_case', 'rev_paths.csv')}") + + # run land use analysis for each tech + for tech in techs: + try: + getLandUse(scen_path, jsonfile, rev_paths, reedspath, tech, capacity_cols[tech]) + except Exception as err: + print(err) + + print("\nCompleted 'land_use_analysis.py' script.") - # eventual plan is to add function calls for other techs (namely wind-ons) - getLandUse(scen, scen_path, rev_paths, tech="upv") - except Exception as err: - print(err) - print("") +if __name__ == '__main__': - print("Completed 'land_use_analysis.py' script.") + # Argument inputs + parser = argparse.ArgumentParser(description="""This script calculates land-use implications for + solar and wind buildouts from one or more ReEDS runs. 
+ Requires the 'reeds_to_rev.py' to have been run.""") + parser.add_argument("scenario", help="Folder of ReEDS run (or path to sc file if running with 'area_only' equal to True") + parser.add_argument('--area_only', '-a', action="store_true", + help="Only estimate supply-curve area (no ReEDS build)") + parser.add_argument('--debug', '-d', action="store_true", + help="Turn off log for debugging") + parser.add_argument('--json', '-j', type=str, default='process_categories', + help='Name of json file that sets which land use categories to process') + parser.add_argument('--tech', '-t', type=str, default='all', + choices=['upv', 'wind-ons', 'all'], help='techs to process') + + args = parser.parse_args() + + thispath = os.path.dirname(os.path.realpath(__file__)) + reedspath = os.path.abspath(os.path.join(thispath, "..", "..")) - # debugging - # python postprocessing/land_use/land_use_analysis.py /Users/bsergi/Documents/Projects/Solar-siting/ReEDS-2.0/runs/2022_03_26_AllOptions_sites - # python /Users/bsergi/Documents/Projects/Solar-siting/ReEDS-2.0/postprocessing/bokehpivot/reports/interface_report_model.py "ReEDS 2.0" /Users/bsergi/Documents/Projects/Solar-siting/ReEDS-2.0/runs/2022_03_26_AllOptions_sites all No none /Users/bsergi/Documents/Projects/Solar-siting/ReEDS-2.0/postprocessing/bokehpivot/reports/templates/reeds2/land_use.py one /Users/bsergi/Documents/Projects/Solar-siting/ReEDS-2.0/runs/2022_03_26_AllOptions_sites/outputs/reeds-report-land No + # Set up logger + if args.debug: + print("In debug mode, skipping logging") + else: + site.addsitedir(os.path.join(reedspath,'input_processing')) + from ticker import makelog + log = makelog(scriptname=__file__, logpath=os.path.join(args.scenario,'gamslog.txt')) + + # convert techs to list + if args.tech == "all": + techs = ['upv', 'wind-ons'] + else: + techs = [args.tech] + + if args.area_only: + summarizeSupplyCurve(args.scenario, reedspath, args.json, techs) + else: + runLandUse(args.scenario, reedspath, args.json, techs) diff --git a/postprocessing/plots.py b/postprocessing/plots.py index f0ea42d..189da2c 100644 --- a/postprocessing/plots.py +++ b/postprocessing/plots.py @@ -30,8 +30,8 @@ import os import matplotlib as mpl import matplotlib.pyplot as plt -from matplotlib.ticker import ( - AutoMinorLocator, MultipleLocator, AutoLocator, PercentFormatter) +from matplotlib import patheffects as pe +import cmocean ################### ### Plot formatting @@ -118,15 +118,19 @@ def rainbowmapper(iterable, colormap=None, explicitcolors=False, categorical=Fal plt.cm.tab20(i) for i in [10,11,6,7,2,3,12,13,16,17,4,5,18,19,0,1,8,9,14,15,] ] if colormap is not None: - if type(colormap) is list: + if isinstance(colormap, list): colors=[colormap[i] for i in range(len(iterable))] else: colors=[colormap(i) for i in np.linspace(0,1,len(iterable))] - elif len(iterable) == 1: colors=['C3'] - elif len(iterable) == 2: colors=['C3','C0'] - elif len(iterable) == 3: colors=['C3','C2','C0'] - elif len(iterable) == 4: colors=['C3','C1','C2','C0'] - elif len(iterable) == 5: + elif len(iterable) == 1: + colors=['C3'] + elif len(iterable) == 2: + colors=['C3','C0'] + elif len(iterable) == 3: + colors=['C3','C2','C0'] + elif len(iterable) == 4: + colors=['C3','C1','C2','C0'] + elif len(iterable) == 5: colors=['C3','C1','C2','C0','C4'] elif len(iterable) == 6: colors=['C5','C3','C1','C2','C0','C4'] @@ -225,15 +229,14 @@ def addcolorbarhist( if extend == 'neither': data_hist = data elif extend == 'max': - data_hist = data.clip(max=vmax) + data_hist = 
data.clip(upper=vmax) elif extend == 'min': - data_hist = data.clip(min=vmin) + data_hist = data.clip(lower=vmin) elif extend == 'both': - data_hist = data.clip(min=vmin, max=vmax) + data_hist = data.clip(lower=vmin, upper=vmax) ######### Add colorbar norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax) - colors = cmap(bins) ### Horizontal orientation if orientation in ['horizontal', 'h']: @@ -244,7 +247,7 @@ def addcolorbarhist( cax = f.add_axes([caxleft, caxbottom, caxwidth, caxheight]) - cb1 = mpl.colorbar.ColorbarBase( + _cb1 = mpl.colorbar.ColorbarBase( cax, cmap=cmap, norm=norm, orientation='horizontal', extend=extend, extendfrac=extendfrac) cax.xaxis.set_ticks_position('bottom') @@ -339,7 +342,7 @@ def addcolorbarhist( cax = f.add_axes([caxleft, caxbottom, caxwidth, caxheight]) - cb1 = mpl.colorbar.ColorbarBase( + _cb1 = mpl.colorbar.ColorbarBase( cax, cmap=cmap, norm=norm, orientation='vertical', extend=extend, extendfrac=extendfrac) cax.yaxis.set_ticks_position('left') @@ -431,22 +434,22 @@ def plot2dhistarray(xdata, ydata, logcolor=True, bins=None, * ax[(1,1)]: Right y-axis histogram """ ### Format inputs - if type(bins) == int: + if isinstance(bins, int): bins = [np.linspace(min(xdata), max(xdata), bins), np.linspace(min(ydata), max(ydata), bins)] elif type(bins) == tuple: - if (type(bins[0]) == int) and (type(bins[1]) == int): + if isinstance(bins[0], int) and isinstance(bins[1], int): bins = [np.linspace(min(xdata), max(xdata), bins[0]), np.linspace(min(ydata), max(ydata), bins[1])] elif (type(bins[0] == np.ndarray) and (type(bins[1]) == np.ndarray)): pass elif type(bins) == np.ndarray: bins = [bins, bins] - elif bins == None: + elif bins is None: bins = [np.linspace(min(xdata), max(xdata), 101), np.linspace(min(ydata), max(ydata), 101)] - if gridspec_kw == None: + if gridspec_kw is None: gridspec_kw = {'height_ratios': [1,6], 'width_ratios': [6,1], 'hspace':0.02, 'wspace': 0.02} ### Procedure @@ -513,11 +516,11 @@ def plotquarthist( * pad, if not None, overwrites histpad and quartpad """ ### Interpret inputs - if flierprops == None: + if flierprops is None: flierprops={'markersize': 2, 'markerfacecolor': 'none', 'markeredgewidth': 0.25, 'markeredgecolor': '0.5'} - if hist_range == None: + if hist_range is None: hist_range = (dfplot.min().min(), dfplot.max().max()) else: assert (len(hist_range)==2 or type(hist_range) in [float, int]) @@ -525,14 +528,14 @@ def plotquarthist( labels = list(dfplot.columns) data_sets = [dfplot[label].dropna().values for label in labels] - if x_locations == None: + if x_locations is None: x_locations = dfplot.columns.values - if any([type(col) == str for col in x_locations]): + if any([isinstance(col, str) for col in x_locations]): x_locations = range(len(x_locations)) - if (pad != None) and (direction == 'right'): + if (pad is not None) and (direction == 'right'): histpad, quartpad = pad, -pad - elif (pad != None) and (direction == 'left'): + elif (pad is not None) and (direction == 'left'): histpad, quartpad = -pad, pad ###### Some shared quantities between quarts and hists @@ -596,9 +599,9 @@ def plotquarthist( assert len(x_locations) == len(scaled_data_sets), "mismatched axes" for i in range(len(data_sets)): ### Set bar color - if type(histcolor) == list: + if isinstance(histcolor, list): c = histcolor[i] - elif type(histcolor) == dict: + elif isinstance(histcolor, dict): c = histcolor[labels[i]] else: c = histcolor @@ -635,8 +638,10 @@ def plotquarthist( ## Botstrapped 95% confidence intervals for median for i in range(len(data_sets)): ### Set 
median range color - if type(cicolor) == list: c = cicolor[i] - else: c = cicolor + if isinstance(cicolor, list): + c = cicolor[i] + else: + c = cicolor ### Plot median bars ax.plot(np.array([x_locations[i], x_locations[i]]) + (quartpad * xscale), [cilos[i], cihis[i]], @@ -768,7 +773,7 @@ def subplotpercentiles(ax, dfplot, datacolumn, tracecolumn, subplotcolumn=None, if colordict is None: colors = ['C{}'.format(i%10) for i in range(len(tracevals))] colordict = dict(zip(tracevals, colors)) - elif type(colordict) == list: + elif isinstance(colordict, list): colordict = dict(zip(tracevals, colordict)) elif colordict in ['order', 'ordered', 'rainbow', 'sort']: colordict = rainbowmapper(tracevals) @@ -797,7 +802,7 @@ def subplotpercentiles(ax, dfplot, datacolumn, tracecolumn, subplotcolumn=None, color=colordict[traceval], **kwargs) ax.set_xlim(0,100) - ax.xaxis.set_minor_locator(AutoMinorLocator(xdivs)) + ax.xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(xdivs)) ax.tick_params(axis='both', which='major', direction='in', top=True, right=True, length=4) ax.tick_params(axis='x', which='minor', direction='in', @@ -809,8 +814,7 @@ def subplotpercentiles(ax, dfplot, datacolumn, tracecolumn, subplotcolumn=None, ### Set y limits based on ymin, ymax if ylimits is not None: assert len(ylimits) == 2, 'len(ylimits) must be 2 but is {}'.format(len(ylimits)) - assert type(ylimits[0]) == type(ylimits[1]) - if type(ylimits[0]) == str: + if isinstance(ylimits[0], str) and isinstance(ylimits[1], str): ymin = float(ylimits[0].replace('%','')) * 0.01 ymax = float(ylimits[1].replace('%','')) * 0.01 describe = dfplot[datacolumn].describe(percentiles=[ymin, ymax]) @@ -854,7 +858,7 @@ def plotpercentiles(dfplot, datacolumn, tracecolumn, subplotcolumn=None, if colordict is None: colors = ['C{}'.format(i%10) for i in range(len(tracevals))] colordict = dict(zip(tracevals, colors)) - elif type(colordict) == list: + elif isinstance(colordict, list): colordict = dict(zip(tracevals, colordict)) elif colordict in ['order', 'ordered', 'rainbow', 'sort']: colordict = rainbowmapper(tracevals) @@ -872,7 +876,7 @@ def plotpercentiles(dfplot, datacolumn, tracecolumn, subplotcolumn=None, ax[j].plot(x, y, label=tracevals[i], c=colordict[tracevals[i]]) ax[j].set_xlim(0,100) - ax[j].xaxis.set_minor_locator(AutoMinorLocator(xdivs)) + ax[j].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(xdivs)) ax[j].set_title(subplotvals[j], weight='bold', fontname='Arial') ax[j].tick_params( axis='both', which='major', direction='in', @@ -891,7 +895,7 @@ def plotpercentiles(dfplot, datacolumn, tracecolumn, subplotcolumn=None, ax[-1].plot(x, y, label=tracevals[i], c=colordict[tracevals[i]]) ax[-1].set_xlim(0,100) - ax[-1].xaxis.set_minor_locator(AutoMinorLocator(xdivs)) + ax[-1].xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(xdivs)) ax[-1].set_title('All', weight='bold', fontname='Arial') ax[-1].tick_params( axis='both', which='major', direction='in', @@ -911,7 +915,7 @@ def plotpercentiles(dfplot, datacolumn, tracecolumn, subplotcolumn=None, ax.plot(x, y, label=tracevals[i], c=colordict[tracevals[i]]) ax.set_xlim(0,100) - ax.xaxis.set_minor_locator(AutoMinorLocator(xdivs)) + ax.xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(xdivs)) ax.tick_params( axis='both', which='major', direction='in', top=True, right=True, length=4) @@ -924,8 +928,7 @@ def plotpercentiles(dfplot, datacolumn, tracecolumn, subplotcolumn=None, ### Set y limits based on ymin, ymax if ylimits is not None: assert len(ylimits) == 2, 'len(ylimits) must be 2 but is 
{}'.format(len(ylimits)) - assert type(ylimits[0]) == type(ylimits[1]) - if type(ylimits[0]) == str: + if isinstance(ylimits[0], str) and isinstance(ylimits[1], str): ymin = float(ylimits[0].replace('%','')) * 0.01 ymax = float(ylimits[1].replace('%','')) * 0.01 describe = dfplot[datacolumn].describe(percentiles=[ymin, ymax]) @@ -1030,7 +1033,7 @@ def draw_screen_poly(poly, m, def plotusascattermap( dfplot, colorcolumn=None, sizecolumn=None, filterdict=None, sort=True, - basemap=None, cmap=plt.cm.gist_earth_r, mappath=None, + basemap=None, cmap=cmocean.cm.rain, mappath=None, facecolor='w', edgecolor='w', latlonlabels=None, markersize=None, marker='o', figsize=(10,7.5), dpi=None, zrange=None, colors=None, maptype='scatter', contourlevels=100, @@ -1053,9 +1056,9 @@ def plotusascattermap( import geopandas as gpd import shapely ### Set the map bounds based on input - if type(bounds) is dict: + if isinstance(bounds, dict): dictbounds = bounds - elif type(bounds) is list: + elif isinstance(bounds, list): dictbounds = dict(zip( ['lat_1','lat_2','lon_0','lat_0','width','height'], bounds @@ -1124,10 +1127,11 @@ def plotusascattermap( if not basemap: ###### Download the map file if necessary - if (not os.path.exists(mappath)) and (downloadmap == False): + if (not os.path.exists(mappath)) and (downloadmap is False): raise Exception("No file at {}; try setting downloadmap=True".format(mappath)) - if (not os.path.exists(mappath)) and (downloadmap == True): - import urllib.request, zipfile + if (not os.path.exists(mappath)) and (downloadmap is True): + import urllib.request + import zipfile ### Download it url = ('https://opendata.arcgis.com/datasets/bee7adfd918e4393995f64e155a1bbdf_0.zip?' 'outSR=%7B%22wkid%22%3A102100%2C%22latestWkid%22%3A3857%7D') @@ -1188,8 +1192,7 @@ def plotusascattermap( zrangeplot = [datamin, datamax] else: assert len(zrange) == 2, "len(zrange) must be 2 but is {}".format(len(zrange)) - assert type(zrange[0]) == type(zrange[1]) - if type(zrange[0]) == str: + if isinstance(zrange[0], str) and isinstance(zrange[1], str): assert (zrange[0].endswith('%') and zrange[1].endswith('%')), "zrange != %" zmin = float(zrange[0].replace('%','')) * 0.01 zmax = float(zrange[1].replace('%','')) * 0.01 @@ -1211,7 +1214,7 @@ def plotusascattermap( if colorcolumn is not None: if colors is None: colordata = dfmap[colorcolumn].values - elif type(colors) == dict: + elif isinstance(colors, dict): colordata = dfmap[colorcolumn].map(lambda x: colors[x]).values elif colorcolumn is None: if colors is None: @@ -1282,18 +1285,18 @@ def plotusascattermap( # # x,y = m_in([-100],[40]) # m_in.scatter(x, y, color='none', ax=ax) - if (colorbarhist == True) and (colorcolumn is not None): + if (colorbarhist is True) and (colorcolumn is not None): ### Add legend if categorical - if type(colors) is dict: + if isinstance(colors, dict): patchlegend(colors, edgecolor=markeredgecolor, **colorbarkwargs) ### Add hist if not categorial else: - cax = addcolorbarhist( + _cax = addcolorbarhist( f=f, ax0=ax, data=colordata, title=colorbartitle, cmap=cmap, vmin=zrangeplot[0], vmax=zrangeplot[1], # title_fontsize='x-large', **colorbarkwargs) - returncax = True + _returncax = True return f, ax # return f, ((ax, cax) if (returncax and colorbarhist) else ax) @@ -1320,7 +1323,7 @@ def sparkline(ax, dsplot, endlabels=True, ax.set_xticks(xticks) if minordivisions is not None: - ax.xaxis.set_minor_locator(AutoMinorLocator(minordivisions)) + ax.xaxis.set_minor_locator(mpl.ticker.AutoMinorLocator(minordivisions)) ### Annotate the ends if 
endlabels: @@ -1375,17 +1378,17 @@ def monthifier(x): ax[i].fill_between( dfplot.index, dfplot[plotcols].values, lw=0, alpha=alpha, color=(colors if type(colors) in [str,mpl.colors.ListedColormap] - else ('C0' if colors == None else colors[0]))) + else ('C0' if colors is None else colors[0]))) elif style in ['line', 'l']: ax[i].plot( dfplot.index, dfplot[plotcols].values, lw=lwforline, alpha=alpha, ls=ls, color=(colors if type(colors) in [str,mpl.colors.ListedColormap] - else ('C0' if colors == None else colors[0]))) + else ('C0' if colors is None else colors[0]))) elif isinstance(plotcols, list): if isinstance(colors, str): colors = [colors]*len(plotcols) - elif colors == None: + elif colors is None: colors = ['C{}'.format(i%10) for i in range(len(plotcols))] for j, plotcol in enumerate(plotcols): dfplot = dfs.loc['{} {}'.format(month, dfs.index[0].year)][[plotcol]] @@ -1487,10 +1490,14 @@ def _despine_sub(ax, direction='out'): """ """ - if not top: ax.spines['top'].set_visible(False) - if not right: ax.spines['right'].set_visible(False) - if not left: ax.spines['left'].set_visible(False) - if not bottom: ax.spines['bottom'].set_visible(False) + if not top: + ax.spines['top'].set_visible(False) + if not right: + ax.spines['right'].set_visible(False) + if not left: + ax.spines['left'].set_visible(False) + if not bottom: + ax.spines['bottom'].set_visible(False) ax.tick_params(axis='both', which='both', direction=direction, top=top, right=right, @@ -1549,7 +1556,7 @@ def patchlegend(colors, edgecolor='none', alpha=1, reverse=False, **kwargs): alpha=alpha, label=i) for i in colors.keys()] - if reverse == True: + if reverse is True: leg = plt.legend(handles=patches[::-1], **kwargs) else: leg = plt.legend(handles=patches, **kwargs) @@ -1559,7 +1566,7 @@ def patchlegend(colors, edgecolor='none', alpha=1, reverse=False, **kwargs): def _differentiate_lines_sub(ax, cycle=10, linestyles=['--',':','-.']): """ """ - if type(linestyles) is str: + if isinstance(linestyles, str): ls = [linestyles,linestyles,linestyles] else: ls = linestyles @@ -1635,7 +1642,7 @@ def annotate(ax, label, x, offset, decimals=0, tail='', **kwargs): None """ ### Get the point to plot - lineindex = [l._label for l in ax.get_lines()].index(label) + lineindex = [line._label for line in ax.get_lines()].index(label) try: pointindex = ax.get_lines()[lineindex]._xorig.tolist().index(x) except ValueError: @@ -1654,8 +1661,13 @@ def annotate(ax, label, x, offset, decimals=0, tail='', **kwargs): noteprops[key] = kwargs[key] ### Annotate it + if isinstance(decimals, int): + _decimals = decimals + else: + _decimals = 0 if y < 100 else -1 + ax.annotate( - ('{:.'+str(decimals)+'f}').format(y)+tail, + f'{np.around(y, _decimals):.{max(_decimals, 0)}f}'+tail, xy=(x,y), textcoords='offset points', xytext=offset, @@ -1665,6 +1677,149 @@ def annotate(ax, label, x, offset, decimals=0, tail='', **kwargs): ) +def optimize_label_positions( + ydata, mindistance, ypad=0, extend='above', + msg=False, debug=False, + ): + """Position non-overlapping y labels while minimizing difference from actual values + + Args: + ydata (array): actual y data values for points to label + mindistance (numeric): minimum distance between labels, in data units + ypad (numeric): factor by which to expand the y data bounds to fit the y labels. + If 0, the label positions will all fall between the minimum and maximum values + of the data (or the optimization will be infeasible). + If extend=='above', labels can extend above the data by the specified factor. 
+ If extend=='below', labels can extend below the data by the specified factor. + If extend=='both', labels can extend above and below the data by the specified factor. + For example, if set to -1.5, the highest label can be 25% higher than the + highest y value and the lowest label can be 25% lower. + msg (boolean): whether to print optimizer log + debug (boolean): return full model output as dictionary + """ + if any([np.isnan(i) for i in ydata]): + raise ValueError("There are NaN entries in ydata") + import pulp + m = pulp.LpProblem("OptimizeLabelPositions", pulp.LpMinimize) + + ### Sets + index = range(len(ydata)) + + ### Parameters + if ypad != 0 and extend not in ['above','up','below','down','both']: + raise ValueError(f"extend={extend} but must be in ['above','up','below','down','both']") + yspan = (max(ydata) - min(ydata)) * (1 + ypad) + if (ypad == 0) or (extend in ['above','up']): + ymax = min(ydata) + yspan + ymin = min(ydata) + elif extend in ['below','down']: + ymax = max(ydata) + ymin = max(ydata) - yspan + elif extend == 'both': + ymax = (max(ydata) + min(ydata))/2 + yspan/2 + ymin = (max(ydata) + min(ydata))/2 - yspan/2 + + ### Variables + YLABEL = pulp.LpVariable.dicts( + 'YLABEL', (i for i in index), lowBound=ymin, upBound=ymax, cat='Continuous') + ERROR_POS = pulp.LpVariable.dicts( + 'ERROR_POS', (i for i in index), lowBound=0, cat='Continuous') + ERROR_NEG = pulp.LpVariable.dicts( + 'ERROR_NEG', (i for i in index), lowBound=0, cat='Continuous') + + ### Constraints + for i in index: + for j in index: + if i == j: + continue + ## ylabel positions can't be within mindistance of each other + if ydata[i] >= ydata[j]: + m += (YLABEL[i] - YLABEL[j] >= mindistance) + else: + m += (YLABEL[j] - YLABEL[i] >= mindistance) + + ## Define the error terms + for i in index: + m += ERROR_POS[i] - ERROR_NEG[i] + YLABEL[i] == ydata[i] + + ### Objective: Minimize the sum of absolute values of errors + m += pulp.lpSum([ERROR_POS[i] + ERROR_NEG[i] for i in index]) + + ### Solve it + m.solve(solver=pulp.PULP_CBC_CMD(msg=msg)) + + if m.status != 1: + raise ValueError('optimize_label_positions is infeasible; try a smaller mindistance') + + ylabel = [YLABEL[i].varValue for i in index] + + if debug: + out = { + 'ylabel': ylabel, + 'error_pos': [ERROR_POS[i].varValue for i in index], + 'error_neg': [ERROR_NEG[i].varValue for i in index], + 'm': m, + } + return out + else: + return ylabel + + +def label_last( + dfplot, ax, + mindistance=None, colors=None, extend='below', line=True, + head=' ', tail='', ha='left', path_effects=None, + fontsize='medium', xpad=1, decimals=0, + value=True, name=False, + ): + """ + dfplot: dataframe with years as index and cases as columns + """ + lastyear = dfplot.index[-1] + if colors is None: + colors = dict(zip(dfplot.columns, ['C7']*dfplot.shape[1])) + + vals = dfplot.loc[lastyear].sort_values(ascending=False).rename('val').to_frame() + + if mindistance is None: + mindistance = dfplot.max().max() * 0.033 + _xpad = xpad if ha == 'left' else -xpad + + try: + vals['ylabel'] = optimize_label_positions( + vals.values, mindistance=mindistance, ypad=1000, extend=extend, + ) + except Exception as err: + print(err) + vals['ylabel'] = vals.val + line = False + for case, row in vals.iterrows(): + ## line + if line: + ax.annotate( + '', + xy=(lastyear+_xpad/2, row.val), + xytext=(lastyear+_xpad, row.ylabel), + arrowprops={ + 'arrowstyle':'-', 'shrinkA':0, 'shrinkB':0, + 'color':colors[case], 'lw':0.5}, + annotation_clip=False, + ) + ## label + if isinstance(decimals, int): + 
_decimals = decimals + else: + _decimals = 0 if row.val < 100 else -1 + val = f'{np.around(row.val, _decimals):.{max(_decimals, 0)}f}' + text = ' '.join([val if value else '', case if name else '']).strip() + ax.annotate( + head+text+tail, + (lastyear+_xpad, row.ylabel), ha=ha, va='center', + color=colors[case], fontsize=fontsize, annotation_clip=False, + path_effects=path_effects, + ) + + def stackbar(df, ax, colors, width=1, net=True, align='center', bottom=0, x0=0, **netargs): """ Inputs @@ -1897,6 +2052,7 @@ def waterfall_span( def plot_region_bars( dfzones, dfdata, colors, ax=None, valscale=3e3, width=5e4, center=False, + zeroline=None, ): """ Inputs @@ -1908,17 +2064,14 @@ def plot_region_bars( valscale: [meters / (units of dfdata)] width: [meters] center: If True, bar center will be at centroid_y; otherwise bar base will be + zeroline: dictionary of kwargs to pass to ax.plot() for a zero line """ - ### Get centroids if necessary - if 'centroid_x' not in dfzones: - dfzones['centroid_x'] = dfzones.centroid.x - dfzones['centroid_y'] = dfzones.centroid.y ### Plot it for r in dfzones.index: if r not in dfdata.index: continue ### Get coordinates - x0, bottom = dfzones.loc[r, ['centroid_x', 'centroid_y']] + x0, bottom = dfzones.loc[r, ['labelx', 'labely']] ### Scale it df = dfdata.loc[r].to_frame().T * valscale df.index = [x0] @@ -1928,3 +2081,118 @@ def plot_region_bars( stackbar( df=df, ax=ax, colors=colors, width=width, net=False, bottom=bottom, ) + if isinstance(zeroline, dict): + ax.plot( + [x0-width/2, x0+width/2], [bottom]*2, + **zeroline, + ) + + +def plot_segmented_arrow( + ax, + reversefrac=0.4, forwardfrac=0.3, + reversewidth=1e5, forwardwidth=2e5, midwidth=3e5, + reversecolor='C3', forwardcolor='C0', midcolor='0.7', + startx=-3e5, endx=3e5, + starty=1e5, endy=-2e5, + alpha=1, zorder=2e6, + headlengthfrac=0.25, headwidthfrac=1, + label='', labelweight='bold', labelfontsize=12, + labelborderlw=2.0, labelborderalpha=0.7, + ): + """Plot a three-segment arrow (usually on a map) + Example inputs: + label=f"←{75.3257:.0f}%→", + """ + ### Check inputs + if np.around(reversefrac + forwardfrac, 3) > 1: + raise ValueError(f"reversefrac + forwardfrac = {reversefrac + forwardfrac}") + + ### Calculate intermediate values + delx = endx - startx + dely = endy - starty + hypotenuse = (delx**2 + dely**2)**0.5 + midx = (startx + endx) / 2 + midy = (starty + endy) / 2 + anglerad = np.arctan((endy - starty) / (endx - startx)) + angle = anglerad * 180 / np.pi + midfrac = np.around(1 - reversefrac - forwardfrac, 3) + + tailxforward = endx - delx * forwardfrac + tailyforward = endy - dely * forwardfrac + lenxforward = delx * forwardfrac + lenyforward = dely * forwardfrac + + tailxreverse = startx + delx * reversefrac + tailyreverse = starty + dely * reversefrac + lenxreverse = delx * reversefrac + lenyreverse = dely * reversefrac + + ### Draw it + ## Forward + arrow = mpl.patches.FancyArrow( + tailxforward, tailyforward, lenxforward, lenyforward, + width=forwardwidth, + length_includes_head=True, + head_width=forwardwidth*headwidthfrac, + head_length=hypotenuse*forwardfrac*headlengthfrac, + alpha=alpha, + color=forwardcolor, lw=0, + zorder=zorder, + clip_on=False, + ) + ax.add_patch(arrow) + + ## Reverse + arrow = mpl.patches.FancyArrow( + tailxreverse, tailyreverse, -lenxreverse, -lenyreverse, + width=reversewidth, + length_includes_head=True, + head_width=reversewidth*headwidthfrac, + head_length=hypotenuse*reversefrac*headlengthfrac, + alpha=alpha, + color=reversecolor, lw=0, + zorder=zorder, +
clip_on=False, + ) + ax.add_patch(arrow) + + ## Middle + if midwidth and (midfrac > 0): + arrow = mpl.patches.FancyArrow( + tailxreverse, tailyreverse, + (tailxforward - tailxreverse), + (tailyforward - tailyreverse), + width=midwidth, + length_includes_head=True, + head_width=midwidth, + head_length=0, + alpha=alpha, + color=midcolor, lw=0, + zorder=zorder, + clip_on=False, + ) + ax.add_patch(arrow) + + ## Label + if len(label): + ax.annotate( + label, + (midx, midy), rotation=angle, + ha='center', va='center', + zorder=1e7, weight=labelweight, fontsize=labelfontsize, + path_effects=[pe.withStroke( + linewidth=labelborderlw, foreground='w', alpha=labelborderalpha)], + ) + + +def wraptext(text, width, fontsize=14): + """ + Inputs + width: Maximum text length in inches + """ + numchars = int(np.around(width / fontsize * 90, 0)) + numlines = len(text) // numchars + bool(len(text) % numchars) + outlist = [text[i*numchars:(i+1)*numchars] for i in range(numlines)] + out = '\n'.join(outlist) + return out diff --git a/postprocessing/plots/scghg_annual.csv b/postprocessing/plots/scghg_annual.csv new file mode 100644 index 0000000..ee46e71 --- /dev/null +++ b/postprocessing/plots/scghg_annual.csv @@ -0,0 +1,186 @@ +# Parent report: https://www.epa.gov/environmental-economics/scghg +# Raw data: https://github.com/USEPA/scghg/blob/main/EPA/output/scghg_annual.csv +gas,emission.year,2.5% Ramsey,2.0% Ramsey,1.5% Ramsey +CO2,2020,117,193,337 +CO2,2021,119,197,341 +CO2,2022,122,200,346 +CO2,2023,125,204,351 +CO2,2024,128,208,356 +CO2,2025,130,212,360 +CO2,2026,133,215,365 +CO2,2027,136,219,370 +CO2,2028,139,223,375 +CO2,2029,141,226,380 +CO2,2030,144,230,384 +CO2,2031,147,234,389 +CO2,2032,150,237,394 +CO2,2033,153,241,398 +CO2,2034,155,245,403 +CO2,2035,158,248,408 +CO2,2036,161,252,412 +CO2,2037,164,256,417 +CO2,2038,167,259,422 +CO2,2039,170,263,426 +CO2,2040,173,267,431 +CO2,2041,176,271,436 +CO2,2042,179,275,441 +CO2,2043,182,279,446 +CO2,2044,186,283,451 +CO2,2045,189,287,456 +CO2,2046,192,291,462 +CO2,2047,195,296,467 +CO2,2048,199,300,472 +CO2,2049,202,304,477 +CO2,2050,205,308,482 +CO2,2051,208,312,487 +CO2,2052,211,315,491 +CO2,2053,214,319,496 +CO2,2054,217,323,500 +CO2,2055,220,326,505 +CO2,2056,222,330,510 +CO2,2057,225,334,514 +CO2,2058,228,338,519 +CO2,2059,231,341,523 +CO2,2060,234,345,528 +CO2,2061,236,348,532 +CO2,2062,239,351,535 +CO2,2063,241,354,539 +CO2,2064,244,357,543 +CO2,2065,246,360,547 +CO2,2066,248,363,550 +CO2,2067,251,366,554 +CO2,2068,253,369,558 +CO2,2069,256,372,562 +CO2,2070,258,375,565 +CO2,2071,261,378,569 +CO2,2072,263,382,573 +CO2,2073,266,385,576 +CO2,2074,269,388,580 +CO2,2075,271,391,583 +CO2,2076,274,394,587 +CO2,2077,276,398,591 +CO2,2078,279,401,594 +CO2,2079,282,404,598 +CO2,2080,284,407,601 +CH4,2020,"1,257","1,648","2,305" +CH4,2021,"1,324","1,723","2,391" +CH4,2022,"1,390","1,799","2,478" +CH4,2023,"1,457","1,874","2,564" +CH4,2024,"1,524","1,950","2,650" +CH4,2025,"1,590","2,025","2,737" +CH4,2026,"1,657","2,101","2,823" +CH4,2027,"1,724","2,176","2,910" +CH4,2028,"1,791","2,252","2,996" +CH4,2029,"1,857","2,327","3,083" +CH4,2030,"1,924","2,403","3,169" +CH4,2031,"2,002","2,490","3,270" +CH4,2032,"2,080","2,578","3,371" +CH4,2033,"2,157","2,666","3,471" +CH4,2034,"2,235","2,754","3,572" +CH4,2035,"2,313","2,842","3,673" +CH4,2036,"2,391","2,929","3,774" +CH4,2037,"2,468","3,017","3,875" +CH4,2038,"2,546","3,105","3,975" +CH4,2039,"2,624","3,193","4,076" +CH4,2040,"2,702","3,280","4,177" +CH4,2041,"2,786","3,375","4,285" +CH4,2042,"2,871","3,471","4,394" 
+CH4,2043,"2,955","3,566","4,502" +CH4,2044,"3,040","3,661","4,610" +CH4,2045,"3,124","3,756","4,718" +CH4,2046,"3,209","3,851","4,827" +CH4,2047,"3,293","3,946","4,935" +CH4,2048,"3,378","4,041","5,043" +CH4,2049,"3,462","4,136","5,151" +CH4,2050,"3,547","4,231","5,260" +CH4,2051,"3,624","4,320","5,363" +CH4,2052,"3,701","4,409","5,466" +CH4,2053,"3,779","4,497","5,569" +CH4,2054,"3,856","4,586","5,672" +CH4,2055,"3,933","4,675","5,774" +CH4,2056,"4,011","4,763","5,877" +CH4,2057,"4,088","4,852","5,980" +CH4,2058,"4,165","4,941","6,083" +CH4,2059,"4,243","5,029","6,186" +CH4,2060,"4,320","5,118","6,289" +CH4,2061,"4,389","5,199","6,385" +CH4,2062,"4,458","5,280","6,480" +CH4,2063,"4,527","5,361","6,576" +CH4,2064,"4,596","5,442","6,671" +CH4,2065,"4,666","5,523","6,767" +CH4,2066,"4,735","5,604","6,862" +CH4,2067,"4,804","5,685","6,958" +CH4,2068,"4,873","5,765","7,053" +CH4,2069,"4,942","5,846","7,149" +CH4,2070,"5,011","5,927","7,244" +CH4,2071,"5,085","6,013","7,344" +CH4,2072,"5,160","6,099","7,444" +CH4,2073,"5,234","6,184","7,545" +CH4,2074,"5,309","6,270","7,645" +CH4,2075,"5,383","6,355","7,745" +CH4,2076,"5,458","6,441","7,845" +CH4,2077,"5,532","6,527","7,946" +CH4,2078,"5,607","6,612","8,046" +CH4,2079,"5,681","6,698","8,146" +CH4,2080,"5,756","6,783","8,246" +N2O,2020,"35,232","54,139","87,284" +N2O,2021,"36,180","55,364","88,869" +N2O,2022,"37,128","56,590","90,454" +N2O,2023,"38,076","57,816","92,040" +N2O,2024,"39,024","59,041","93,625" +N2O,2025,"39,972","60,267","95,210" +N2O,2026,"40,920","61,492","96,796" +N2O,2027,"41,868","62,718","98,381" +N2O,2028,"42,816","63,944","99,966" +N2O,2029,"43,764","65,169","101,552" +N2O,2030,"44,712","66,395","103,137" +N2O,2031,"45,693","67,645","104,727" +N2O,2032,"46,674","68,895","106,316" +N2O,2033,"47,655","70,145","107,906" +N2O,2034,"48,636","71,394","109,495" +N2O,2035,"49,617","72,644","111,085" +N2O,2036,"50,598","73,894","112,674" +N2O,2037,"51,578","75,144","114,264" +N2O,2038,"52,559","76,394","115,853" +N2O,2039,"53,540","77,644","117,443" +N2O,2040,"54,521","78,894","119,032" +N2O,2041,"55,632","80,304","120,809" +N2O,2042,"56,744","81,714","122,586" +N2O,2043,"57,855","83,124","124,362" +N2O,2044,"58,966","84,535","126,139" +N2O,2045,"60,078","85,945","127,916" +N2O,2046,"61,189","87,355","129,693" +N2O,2047,"62,301","88,765","131,469" +N2O,2048,"63,412","90,176","133,246" +N2O,2049,"64,523","91,586","135,023" +N2O,2050,"65,635","92,996","136,799" +N2O,2051,"66,673","94,319","138,479" +N2O,2052,"67,712","95,642","140,158" +N2O,2053,"68,750","96,965","141,838" +N2O,2054,"69,789","98,288","143,517" +N2O,2055,"70,827","99,612","145,196" +N2O,2056,"71,866","100,935","146,876" +N2O,2057,"72,904","102,258","148,555" +N2O,2058,"73,943","103,581","150,235" +N2O,2059,"74,981","104,904","151,914" +N2O,2060,"76,020","106,227","153,594" +N2O,2061,"76,920","107,385","155,085" +N2O,2062,"77,820","108,542","156,576" +N2O,2063,"78,720","109,700","158,066" +N2O,2064,"79,620","110,857","159,557" +N2O,2065,"80,520","112,015","161,048" +N2O,2066,"81,419","113,172","162,539" +N2O,2067,"82,319","114,330","164,030" +N2O,2068,"83,219","115,487","165,521" +N2O,2069,"84,119","116,645","167,012" +N2O,2070,"85,019","117,802","168,503" +N2O,2071,"86,012","119,027","170,013" +N2O,2072,"87,006","120,252","171,523" +N2O,2073,"87,999","121,477","173,033" +N2O,2074,"88,992","122,702","174,543" +N2O,2075,"89,985","123,926","176,053" +N2O,2076,"90,978","125,151","177,563" +N2O,2077,"91,971","126,376","179,073" +N2O,2078,"92,964","127,601","180,582" 
+N2O,2079,"93,958","128,826","182,092" +N2O,2080,"94,951","130,050","183,602" diff --git a/postprocessing/reedsplots.py b/postprocessing/reedsplots.py index 69a540d..194beff 100644 --- a/postprocessing/reedsplots.py +++ b/postprocessing/reedsplots.py @@ -3,6 +3,7 @@ import pandas as pd import matplotlib as mpl import matplotlib.pyplot as plt +from matplotlib import patheffects as pe from glob import glob import os import sys @@ -12,6 +13,7 @@ import geopandas as gpd import shapely import h5py +import cmocean os.environ['PROJ_NETWORK'] = 'OFF' reeds_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -23,10 +25,20 @@ plots.plotparams() site.addsitedir(os.path.join(reeds_path,'input_processing')) from hourly_writetimeseries import h2timestamp, timestamp2h +import hourly_repperiods site.addsitedir(os.path.join(reeds_path,'ReEDS_Augur')) import functions +###### Constants +### Map fiddles +zone_label_offset = { + 'SPP_South': (0.1e6, 0.1e6), + 'MISO_Central': (0.1e6, -0.1e6), + 'MISO_North': (0, -0.1e6), +} + + ### Functions def get_zonemap(case): """ @@ -124,13 +136,14 @@ def get_zonemap(case): def get_dfmap(case): dfba = get_zonemap(case) - dfmap = {} + dfmap = {'r': dfba.copy()} for col in ['interconnect','nercr','transreg','transgrp','st','country']: dfmap[col] = dfba.copy() dfmap[col]['geometry'] = dfmap[col].buffer(0.) dfmap[col] = dfmap[col].dissolve(col) - dfmap[col]['labelx'] = dfmap[col].centroid.x - dfmap[col]['labely'] = dfmap[col].centroid.y + for prefix in ['','label']: + dfmap[col][prefix+'x'] = dfmap[col].centroid.x + dfmap[col][prefix+'y'] = dfmap[col].centroid.y return dfmap @@ -197,8 +210,30 @@ def read_pras_results(filepath): return df +def get_inflatable(inflationpath=None): + """Get an [inyear,outyear] lookup table for inflation""" + inflation = pd.read_csv(inflationpath, index_col='t') + ### Make the single-pair function + def inflatifier(inyear, outyear=2019, inflation=inflation): + if inyear < outyear: + return inflation.loc[inyear+1:outyear,'inflation_rate'].cumprod()[outyear] + elif inyear > outyear: + return 1 / inflation.loc[outyear+1:inyear,'inflation_rate'].cumprod()[inyear] + else: + return 1 + ### Make the output table + inflatable = {} + for inyear in range(1960,2051): + for outyear in range(1960,2051): + inflatable[inyear,outyear] = inflatifier(inyear,outyear) + inflatable = pd.Series(inflatable) + return inflatable + + def plotdiff( - val, casebase, casecomp, onlytechs=None, titleshorten=0, + val, casebase, casecomp, + casebase_name=None, casecomp_name=None, + onlytechs=None, titleshorten=0, yearmin=None, yearmax=2050, colors=None, **plot_kwds, ): @@ -599,13 +634,13 @@ def plotdiff( for col in range(3): ax[col].xaxis.set_visible(False) - ax[0].set_title(os.path.basename(casebase)[titleshorten:], x=0, ha='left', size='large', weight='bold') - ax[1].set_title(os.path.basename(casecomp)[titleshorten:], x=0, ha='left', size='large', weight='bold') + _casebase_name = (casebase_name if casebase_name else os.path.basename(casebase)[titleshorten:]) + _casecomp_name = (casecomp_name if casecomp_name else os.path.basename(casecomp)[titleshorten:]) + ax[0].set_title(_casebase_name, x=0, ha='left', size='large', weight='bold') + ax[1].set_title(_casecomp_name, x=0, ha='left', size='large', weight='bold') for col in [2,3]: ax[col].set_title( - '{}\n– {}'.format( - os.path.basename(casecomp)[titleshorten:], - os.path.basename(casebase)[titleshorten:]), + f'{_casecomp_name}\n– {_casebase_name}', x=0, ha='left', size='large', weight='bold') ax[0].annotate( 
(tfix.get(val,val) @@ -835,7 +870,8 @@ def plot_trans_onecase( subtract_baseyear=None, show_overlap=True, drawzones=True, show_converters=0.5, crs='ESRI:102008', - thickborders='none', thickness=0.5, drawstates=True, + thickborders='none', thickness=0.5, thickcolor='0.75', + drawstates=True, label_line_capacity=0, crossing_level='r', ): """ @@ -962,11 +998,11 @@ def plot_trans_onecase( ###### Shared ### Boundaries if drawzones: - dfzones.plot(ax=ax, edgecolor='0.5', facecolor='none', lw=0.1) + dfzones.plot(ax=ax, edgecolor='0.8', facecolor='none', lw=0.1) if drawstates: dfstates.plot(ax=ax, edgecolor='0.25', facecolor='none', lw=0.2) if thickborders not in [None,'','none','None',False]: - dfthick.plot(ax=ax, edgecolor='0.75', facecolor='none', lw=thickness) + dfthick.plot(ax=ax, edgecolor=thickcolor, facecolor='none', lw=thickness) ax.axis('off') ### Labels if pcalabel: @@ -1074,7 +1110,7 @@ def plot_trans_onecase( ax.annotate( '10 GW', (-1.75e6, -1.0e6), ha='center', va='top', weight='bold', fontsize=scalesize) - return f,ax + return f, ax, dfplot def plotdiffmaps(val, i_plot, year, casebase, casecomp, reeds_path, @@ -1254,7 +1290,7 @@ def plotdiffmaps(val, i_plot, year, casebase, casecomp, reeds_path, 0.1, ) - dfplot.plot(ax=ax, column=valcol+'_diff', cmap=plt.cm.bwr, legend=True, + dfplot.plot(ax=ax, column=valcol+'_diff', cmap=plt.cm.RdBu_r, legend=True, vmin=-zlim, vmax=+zlim, legend_kwds=legend_kwds) ### Finish and return @@ -1267,7 +1303,7 @@ def plotdiffmaps(val, i_plot, year, casebase, casecomp, reeds_path, def plot_trans_vsc( case, year=2050, wscale=0.4, alpha=1.0, miles=300, - cmap=plt.cm.gist_earth_r, scale=True, title=True, + cmap=cmocean.cm.rain, scale=True, title=True, convertermax=None, f=None, ax=None, ): @@ -1363,24 +1399,57 @@ def plot_trans_vsc( def plot_transmission_utilization( case, year=2050, plottype='mean', network='trans', - wscale=0.0004, alpha=1.0, cmap=plt.cm.gist_earth_r, + wscale=0.0004, alpha=1.0, cmap=cmocean.cm.rain, extent='modeled', + f=None, ax=None, + thicklevel='transreg', ): """ # Inputs extent: 'modeled' for modeled area, 'usa' for contiguous US """ + sw = pd.read_csv( + os.path.join(case, 'inputs_case', 'switches.csv'), + header=None, index_col=0).squeeze(1) ### Get the output data - if ('h2' in network.lower()) or ('hydrogen' in network.lower()): + add_other_direction = False + hcol = 'h' + if any([k in network.lower() for k in ['h2','hydrogen']]): dftrans = { 'trans_cap': pd.read_csv(os.path.join(case,'outputs','h2_trans_cap.csv')), 'trans_flow_power': pd.read_csv(os.path.join(case,'outputs','h2_trans_flow.csv')), } + title = f'{plottype.title()} utilization [fraction]' + add_other_direction = True + elif any([k in network.lower() for k in ['stress','prm','ra','peak','resource adequacy']]): + dftrans = { + 'trans_cap': pd.read_csv(os.path.join(case,'outputs','tran_cap_prm.csv')), + } + if int(sw.GSw_PRM_CapCredit): + dftrans['trans_flow_power'] = pd.read_csv(os.path.join(case,'outputs','captrade.csv')) + hcol = 'ccseason' + else: + dftrans['trans_flow_power'] = pd.read_csv(os.path.join(case,'outputs','tran_flow_stress.csv')) + title = f'{plottype.title()} utilization\nfor RA [fraction]' else: dftrans = { 'trans_cap': pd.read_csv(os.path.join(case,'outputs','tran_out.csv')), 'trans_flow_power': pd.read_csv(os.path.join(case,'outputs','tran_flow_power.csv')), } + title = f'{plottype.title()} utilization [fraction]' + add_other_direction = True + if add_other_direction: + ## Include both directions + dftrans['trans_cap'] = pd.concat([ + 
dftrans['trans_cap'], + dftrans['trans_cap'].rename(columns={'r':'rr','rr':'r'}), + ], axis=0, ignore_index=True) + else: + ## Reorient negative values + dftrans['trans_flow_power'].loc[dftrans['trans_flow_power'].Value < 0, ['r','rr']] = ( + dftrans['trans_flow_power'].loc[dftrans['trans_flow_power'].Value < 0, ['rr','r']].values + ) + dftrans['trans_flow_power'].Value = dftrans['trans_flow_power'].Value.abs() val_r = pd.read_csv( os.path.join(case,'inputs_case','val_r.csv'), header=None, ).squeeze(1).values.tolist() @@ -1393,12 +1462,12 @@ def plot_transmission_utilization( .groupby([c for c in dftrans[data] if c not in ['Value']], as_index=False) .sum() .drop('t', axis=1) - ).rename(columns={'allh':'h','Value':'Val','MW':'Val'}) + ).rename(columns={'allh':'h', 'Value':'Val', 'MW':'Val', hcol:'h'}) ### Get utilization by timeslice utilization = dftrans['trans_flow_power'].merge( dftrans['trans_cap'], on=['r','rr','trtype'], suffixes=('_flow','_cap'), - how='outer' - ).fillna(0) + how='right', + ).fillna({'Val_flow':0}) utilization['fraction'] = utilization.Val_flow.abs() / utilization.Val_cap ### Get annual fractional utilization ## First try the hourly version; if it doesn't exist load the h17 version @@ -1411,14 +1480,14 @@ def plot_transmission_utilization( os.path.join(case,'inputs_case','numhours.csv'), header=0, names=['h','hours'], index_col='h').squeeze(1) utilization['Valh'] = utilization.apply( - lambda row: hours[row.h] * abs(row.Val_flow), + lambda row: hours.get(row.get('h', 1), 1) * abs(row.Val_flow), axis=1 ) utilization_annual = ( utilization.groupby(['r','rr','trtype']).Valh.sum() .divide(dftrans['trans_cap'].set_index(['r','rr','trtype']).Val) .fillna(0).rename('fraction') - / 8760 + / utilization.h.drop_duplicates().map(lambda x: hours.get(x, 1)).sum() ).reset_index() ###### Plot max utilization @@ -1427,11 +1496,17 @@ def plot_transmission_utilization( 'mean': utilization_annual, } dfplot = dfplots[plottype.lower()] + ### Only keep the direction with the higher fraction, since it presumably drives investment + ### (if there is any) + dfplot.loc[dfplot.r > dfplot.rr, ['r','rr']] = dfplot.loc[dfplot.r > dfplot.rr, ['rr','r']].values + dfplot = dfplot.sort_values('fraction').drop_duplicates(subset=['r','rr'], keep='last') + if (dfplot.fraction > 1.01).any(): + raise Exception(f'Fractions greater than 1: {dfplot.loc[dfplot.fraction > 1]}') ### Load geographic data dfba = get_zonemap(case) if extent.lower() not in ['usa','full','nation','us','country','all']: dfba = dfba.loc[val_r] - dfstates = dfba.dissolve('st') + dfmap = get_dfmap(case) ### Plot it dfplot = dfplot.merge(dftrans['trans_cap'], on=['r','rr','trtype'], how='left') dfplot['r_x'] = dfplot.r.map(dfba.x) @@ -1439,10 +1514,13 @@ def plot_transmission_utilization( dfplot['rr_x'] = dfplot.rr.map(dfba.x) dfplot['rr_y'] = dfplot.rr.map(dfba.y) ### Plot it - plt.close() - f,ax = plt.subplots(figsize=(14,8)) - dfba.plot(ax=ax, edgecolor='0.5', facecolor='none', lw=0.1) - dfstates.plot(ax=ax, edgecolor='0.25', facecolor='none', lw=0.2) + if (f is None) and (ax is None): + plt.close() + f,ax = plt.subplots(figsize=(14,8)) + dfba.plot(ax=ax, edgecolor='0.75', facecolor='none', lw=0.1) + if thicklevel: + dfmap[thicklevel].plot(ax=ax, edgecolor='C7', facecolor='none', lw=0.2) + dfmap['interconnect'].plot(ax=ax, edgecolor='k', facecolor='none', lw=0.3) for i, row in dfplot.iterrows(): ax.plot( [row['r_x'], row['rr_x']], [row['r_y'], row['rr_y']], @@ -1451,17 +1529,15 @@ def plot_transmission_utilization( ) 
plots.addcolorbarhist( f=f, ax0=ax, data=dfplot.fraction.values, - title=f'{plottype.title()} utilization [fraction]', cmap=cmap, - vmin=0., vmax=1., - orientation='horizontal', labelpad=2.25, histratio=1., + title=title, cmap=cmap, + vmin=0., vmax=1., nbins=51, + orientation='horizontal', labelpad=2.5, histratio=1., cbarwidth=0.025, cbarheight=0.25, cbarbottom=0.15, cbarhoffset=-0.8, ) - ax.annotate( - '{} ({})'.format(os.path.basename(case), year), - (0.1,1), xycoords='axes fraction', fontsize=10) + ax.set_title(f'{os.path.basename(case)} ({year})', fontsize=10) ax.axis('off') - return f,ax + return f, ax, dfplot def plot_vresites_transmission( @@ -1476,7 +1552,7 @@ def plot_vresites_transmission( label={'wind-ons':'Land-based wind [GW]', 'upv':'Photovoltaics [GW]', 'wind-ofs':'Offshore wind [GW]'}, - vmax={'upv':4.303, 'wind-ons':0.4, 'wind-ofs':0.6}, + vmax={'upv':0.4, 'wind-ons':0.4, 'wind-ofs':0.6}, trans_scale=True, show_transmission=True, title=True, @@ -1538,7 +1614,7 @@ def plot_vresites_transmission( plots.addcolorbarhist( f=f, ax0=ax, data=dfplot.GW.values, title=legend_kwds['label'], cmap=cm[tech], - vmin=0., vmax=vmax[tech], + vmin=0., vmax=vmax[tech], nbins=101, orientation='horizontal', labelpad=2.25, cbarbottom=-0.01, histratio=1., cbarwidth=0.025, cbarheight=0.25, cbarhoffset=cbarhoffset[tech], ) @@ -1586,9 +1662,8 @@ def plot_vresites_transmission( def plot_prmtrade( - case, year=2050, + case, year=2050, vmax=10e3, cm=plt.cm.inferno_r, wscale=7, alpha=0.8, - crs='ESRI:102008', f=None, ax=None, dpi=150, ): """ @@ -1628,7 +1703,9 @@ def plot_prmtrade( ### Get the BA map dfba = get_zonemap(os.path.join(case)) ## Downselect to modeled regions - val_r = list(set(dfplot.r.unique().tolist() + dfplot.rr.unique().tolist())) + val_r = pd.read_csv( + os.path.join(case,'inputs_case','val_r.csv'), header=None, + ).squeeze(1).values.tolist() dfba = dfba.loc[val_r].copy() ## Aggregate to states dfstates = dfba.dissolve('st') @@ -1643,10 +1720,10 @@ def plot_prmtrade( dfba['y'] = dfba.index.map(endpoints.y) ### Get scaling and layout - maxflow = dfplot.MW.abs().max() + _vmax = dfplot.MW.abs().max() if vmax in [None, 0, 0.] 
else vmax if int(sw.get('GSw_PRM_CapCredit', 1)): ccseasons = pd.read_csv( - os.path.join(reeds_path,'inputs','variability','h_dt_szn.csv') + os.path.join(case,'inputs_case','h_dt_szn.csv') ).ccseason.unique() else: ccseasons = dfplot.ccseason.sort_values().unique() @@ -1680,7 +1757,7 @@ def plot_prmtrade( length_includes_head=True, head_width=abs(lineflow)*wscale*2., head_length=abs(lineflow)*wscale*1.0, - lw=0, color=cm(abs(lineflow)/maxflow), alpha=alpha, + lw=0, color=cm(abs(lineflow)/_vmax), alpha=alpha, ## Plot the primary direction on bottom since it's thicker zorder=(1e6 if primary_direction else 2e6), clip_on=False, @@ -2982,29 +3059,9 @@ def map_transmission_lines( return f,ax -def map_zone_capacity( - case, year=2050, valscale=3e3, width=7e4, - center=True, linealpha=0.6, - scale=10, sideplots=True, legend=True, - f=None, ax=None, - ): +def get_colors(reeds_path): """ - Inputs - ------ - scale: [float] scalebar size in GW; if zero, don't plot scalebar - sideplots: Nationwide tranmsission, emissions, generation, and generation capacity """ - ###### Shared inputs - sw = pd.read_csv( - os.path.join(case, 'inputs_case', 'switches.csv'), - header=None, index_col=0).squeeze(1) - years = pd.read_csv( - os.path.join(case,'inputs_case','modeledyears.csv')).columns.astype(int).values - yearstep = years[-1] - years[-2] - # tech_map = pd.read_csv( - # os.path.join( - # reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_map.csv'), - # index_col='raw').squeeze(1) bokehcolors = pd.read_csv( os.path.join( reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_style.csv'), @@ -3012,8 +3069,8 @@ def map_zone_capacity( bokehcolors = pd.concat([ bokehcolors.loc['smr':'electrolyzer'], pd.Series('#D55E00', index=['dac'], name='color'), - bokehcolors.loc[:'Canada'], - ]) + bokehcolors.loc[:'smr'], + ]).reset_index().drop_duplicates().set_index('index').color bokehcolors['canada'] = bokehcolors['Canada'] trtype_map = pd.read_csv( @@ -3028,57 +3085,111 @@ def map_zone_capacity( transcolors['LCC'] = transcolors['lcc'] transcolors['VSC'] = transcolors['vsc'] - dfba = get_zonemap(case) - dfba['centroid_x'] = dfba.centroid.x - dfba['centroid_y'] = dfba.centroid.y + return bokehcolors, transcolors - hierarchy = pd.read_csv( - os.path.join(case,'inputs_case','hierarchy.csv') - ).rename(columns={'*r':'r'}).set_index('r') - for col in hierarchy: - dfba[col] = dfba.index.map(hierarchy[col]) - hierarchy = hierarchy.loc[hierarchy.country=='USA'].copy() - dfmap = get_dfmap(case) +def get_gen_capacity(case, year=2050, level='r', units='GW'): + """Get zonal generation capacity from a ReEDS run. 
+ Returns: + Dataframe with index=zones and columns=techs + """ + ### Check inputs + assert units in ['MW','GW','TW'] - ###### Case inputs - ### Capacity - dfcap_in = pd.read_csv( - os.path.join(case,'outputs','cap.csv'), - names=['i','r','t','MW'], header=0, - ) + ### Get data + try: + hierarchy = pd.read_csv( + os.path.join(case,'inputs_case','hierarchy.csv') + ).rename(columns={'*r':'r'}).set_index('r') + except (NotADirectoryError, FileNotFoundError): + hierarchy = pd.read_csv( + os.path.join(case.split(os.sep+'outputs')[0],'inputs_case','hierarchy.csv') + ).rename(columns={'*r':'r'}).set_index('r') + + bokehcolors, transcolors = get_colors(reeds_path) + + try: + dfcap_in = pd.read_csv( + os.path.join(case,'outputs','cap.csv'), + names=['i','r','t','MW'], header=0, + ) + except (NotADirectoryError, FileNotFoundError): + dfcap_in = pd.read_csv(case, names=['i','r','t','MW'], header=0) dfcap_in.i = simplify_techs(dfcap_in.i) dfcap = dfcap_in.loc[ (dfcap_in.t==year) - ].rename(columns={'MW':'GW'}).groupby(['i','r']).GW.sum().unstack('i') / 1e3 + ].groupby(['i','r']).MW.sum().unstack('i') + if units != 'MW': + dfcap *= {'GW':1e-3, 'TW':1e-6}[units] dfcap = ( dfcap[[c for c in bokehcolors.index if c in dfcap]] .drop('electrolyzer', axis=1, errors='ignore') ).copy() + ## Aggregate if necessary + if level != 'r': + dfcap.index = dfcap.index.map(hierarchy[level]) + dfcap = dfcap.groupby(axis=0, level='r').sum() - tran_out = pd.read_csv( - os.path.join(case,'outputs','tran_out.csv') - ).rename(columns={'Value':'MW'}) + return dfcap - if (f is None) and (ax is None): - plt.close() - f,ax = plt.subplots(figsize=(12, 9)) # (10, 6.88) - ### Background - dfba.plot(ax=ax, facecolor='none', edgecolor='0.8', lw=0.2) - dfmap['st'].plot(ax=ax, facecolor='none', edgecolor='0.7', lw=0.4) - dfmap['interconnect'].plot(ax=ax, facecolor='none', edgecolor='k', lw=1) - ### Plot transmission +def get_trans_capacity(case, year=2050, level='r', units='GW'): + """Get zonal transmission capacity from a ReEDS run. 
+    Returns:
+        Geodataframe with index=interfaces and geometry=linestrings
+    """
+    ### Check inputs
+    assert units in ['MW','GW','TW']
+
+    ### Get data
+    try:
+        hierarchy = pd.read_csv(
+            os.path.join(case,'inputs_case','hierarchy.csv')
+        ).rename(columns={'*r':'r'}).set_index('r')
+    except (NotADirectoryError, FileNotFoundError):
+        hierarchy = pd.read_csv(
+            os.path.join(case.split(os.sep+'outputs')[0],'inputs_case','hierarchy.csv')
+        ).rename(columns={'*r':'r'}).set_index('r')
+
+    try:
+        dfmap = get_dfmap(case)
+    except (NotADirectoryError, FileNotFoundError):
+        dfmap = get_dfmap(case.split(os.sep+'outputs')[0])
+
+    for r in zone_label_offset:
+        if r in dfmap[level].index:
+            dfmap[level].loc[r, 'labelx'] += zone_label_offset[r][0]
+            dfmap[level].loc[r, 'labely'] += zone_label_offset[r][1]
+
+    try:
+        tran_out = pd.read_csv(
+            os.path.join(case,'outputs','tran_out.csv')
+        ).rename(columns={'Value':'MW'})
+    except (NotADirectoryError, FileNotFoundError):
+        tran_out = pd.read_csv(case).rename(columns={'Value':'MW'})
+
     transmap = tran_out.loc[
         (tran_out.t==year)
-    ][['r','rr','trtype','MW']].rename(columns={'MW':'GW'}).copy()
-    transmap.GW /= 1e3
+    ][['r','rr','trtype','MW']].copy()
+    if units != 'MW':
+        transmap[units] = transmap.MW * {'GW':1e-3, 'TW':1e-6}[units]
+    ## Aggregate if necessary
+    if level != 'r':
+        transmap.r = transmap.r.map(hierarchy[level])
+        transmap.rr = transmap.rr.map(hierarchy[level])
+        for i, row in transmap.iterrows():
+            if row.r > row.rr:
+                transmap.loc[i,['r','rr']] = transmap.loc[i,['rr','r']].values
+        transmap = transmap.loc[
+            transmap.r != transmap.rr
+        ].groupby(['r','rr','trtype'], as_index=False)[units].sum()
+    transmap.index = (transmap.r + '|' + transmap.rr).rename('interface')
     ## Add geographic data to the line capacity
-    key = dfba[['centroid_x','centroid_y']]
+    key = dfmap[level][['labelx','labely']]
     for col in ['r','rr']:
         for i in ['x','y']:
             transmap[f'{col}_{i}'] = transmap.apply(
-                lambda row: key.loc[row[col]]['centroid_'+i], axis=1)
+                lambda row: key.loc[row[col]]['label'+i], axis=1)
     ## Convert the line capacity dataframe to a geodataframe with linestrings
     transmap['geometry'] = transmap.apply(
@@ -3087,11 +3198,63 @@
         )
     transmap = gpd.GeoDataFrame(transmap).set_crs('ESRI:102008')
+    return transmap
+
+
+def map_zone_capacity(
+    case, year=2050, level='r',
+    valscale=3e3, width=7e4,
+    center=True, linealpha=0.6,
+    scale=10, sideplots=True, legend=True,
+    f=None, ax=None,
+    drawstates=True, drawinterconnects=True,
+    ):
+    """
+    Inputs
+    ------
+    scale: [float] scalebar size in GW; if zero, don't plot scalebar
+    sideplots: Nationwide transmission, emissions, generation, and generation capacity
+    """
+    ###### Shared inputs
+    sw = pd.read_csv(
+        os.path.join(case, 'inputs_case', 'switches.csv'),
+        header=None, index_col=0).squeeze(1)
+
+    years = pd.read_csv(
+        os.path.join(case,'inputs_case','modeledyears.csv')).columns.astype(int).values
+    yearstep = years[-1] - years[-2]
+
+    bokehcolors, transcolors = get_colors(reeds_path)
+
+    dfmap = get_dfmap(case)
+    for r in zone_label_offset:
+        if r in dfmap[level].index:
+            dfmap[level].loc[r, 'labelx'] += zone_label_offset[r][0]
+            dfmap[level].loc[r, 'labely'] += zone_label_offset[r][1]
+
+    ###### Case inputs
+    dfcap = get_gen_capacity(case=case, year=year, level=level, units='GW')
+
+    transmap = get_trans_capacity(case=case, year=year, level=level, units='GW')
     ## Buffer the lines into polygons
     transmap['geometry'] = transmap.apply(
         lambda row: row.geometry.buffer(row.GW*valscale/2), axis=1)
-
## Plot it + ###### Plot it + if (f is None) and (ax is None): + plt.close() + f,ax = plt.subplots(figsize=(12, 9)) + ### Background + if level == 'r': + dfmap[level].plot(ax=ax, facecolor='none', edgecolor='0.8', lw=0.2) + else: + dfmap[level].plot(ax=ax, facecolor='none', edgecolor='0.3', lw=0.4) + if drawstates: + dfmap['st'].plot(ax=ax, facecolor='none', edgecolor='0.7', lw=0.4) + if drawinterconnects: + dfmap['interconnect'].plot(ax=ax, facecolor='none', edgecolor='k', lw=1) + + ### Plot transmission capacity for trtype in ['AC','B2B','LCC','VSC']: if trtype in transmap.trtype.unique(): transmap.loc[transmap.trtype==trtype].dissolve().plot( @@ -3100,10 +3263,13 @@ def map_zone_capacity( ### Plot generation capacity plots.plot_region_bars( - dfzones=dfba, dfdata=dfcap, colors=bokehcolors, - ax=ax, valscale=valscale, width=width, center=center) + dfzones=dfmap[level], dfdata=dfcap, colors=bokehcolors, + ax=ax, valscale=valscale, width=width, center=center, + # zeroline={'c':'k', 'ls':':', 'lw':'0.5'}, + ) - ### Add a scale bar + ### Formatting + ax.axis('off') if scale: ax.bar( x=[-1.8e6], height=[valscale * scale], bottom=[-1.05e6], width=3.0e5, @@ -3258,9 +3424,6 @@ def map_zone_capacity( handletextpad=0.3, handlelength=0.7, columnspacing=0.5, ) - ### Formatting - ax.axis('off') - return f, ax, eax @@ -3429,7 +3592,7 @@ def map_hybrid_pv_wind( case, val='site_cap', year=2050, tech=None, vmax=None, markersize=10.75, #stretch=1.2, - cmap=plt.cm.gist_earth_r, + cmap=cmocean.cm.rain, f=None, ax=None, figsize=(6,6), dpi=None, ): """ @@ -3437,7 +3600,7 @@ def map_hybrid_pv_wind( cmap: Suggestions: val=site_cap, tech=wind-ons: plt.cm.Blues val=site_cap, tech=upv: plt.cm.Oranges - val=(site_hybridization,site_spurcap), tech=None: plt.cm.gist_earth_r + val=(site_hybridization,site_spurcap), tech=None: cmocean.cm.rain val=(site_pv_fraction,site_gir), tech=either: plt.cm.turbo or mpl.colors.LinearSegmentedColormap.from_list 'turboclip', [plt.cm.turbo(c) for c in np.linspace(0.1,0.91,101)]) @@ -3894,7 +4057,7 @@ def plot_stressperiod_days(case, repcolor='k', sharey=False, figsize=(10,5)): ] colors = plots.rainbowmapper(range(2007,2014)) rep = f"rep ({sw.GSw_HourlyWeatherYears.replace('_',',')})" - colors[rep] = 'k' + colors[rep] = repcolor t2periods = gen_h_stress.groupby('t').szn.unique() t2starts = t2periods.map( @@ -3903,7 +4066,10 @@ def plot_stressperiod_days(case, repcolor='k', sharey=False, figsize=(10,5)): # load = pd.read_hdf(os.path.join(case,'inputs_case','load.h5')).sum(axis=1) ## Use same procedure as dfpeak and G_plots.plot_e_netloadhours_timeseries() for t in years: - dictout = {rep: dfrep} + if repcolor in ['none', None, False]: + dictout = {} + else: + dictout = {rep: dfrep} for y in range(2007,2014): yearstarts = [i for i in t2starts[t] if i.year == y] yearstarts_aligned = [ @@ -3939,11 +4105,14 @@ def plot_stressperiod_days(case, repcolor='k', sharey=False, figsize=(10,5)): ax[row].annotate( t,(0.005,0.95),xycoords='axes fraction',ha='left',va='top', weight='bold',fontsize='large') - ax[row].set_ylim(0) + if not sharey: + ax[row].set_ylim(0) ax[row].yaxis.set_major_locator(mpl.ticker.MultipleLocator(1)) ax[row].yaxis.set_major_formatter(plt.NullFormatter()) ### Formatting + if sharey: + ax[0].set_ylim(0) ylabel = ( 'Modeled stress periods' if repcolor in ['none', None, False] else 'Modeled periods (representative + stress)' @@ -3954,7 +4123,10 @@ def plot_stressperiod_days(case, repcolor='k', sharey=False, figsize=(10,5)): return f, ax -def plot_stressperiod_evolution(case, 
level='transgrp', metric='sum', threshold=10): +def plot_stressperiod_evolution( + case, level='transgrp', metric='sum', threshold=10, + figsize=None, scale_widths=False, + ): """Plot NEUE by year and stress period iteration""" ### Load NEUE results sw = pd.read_csv( @@ -3984,8 +4156,15 @@ def plot_stressperiod_evolution(case, level='transgrp', metric='sum', threshold= regions = dfplot.columns colors = plots.rainbowmapper(regions) ### Plot it + if figsize is None: + figsize = (max(10, ncols*1.2), 3) + width_ratios = dfstress.reset_index().groupby('year').iteration.max().loc[years].values + 1 + gridspec_kw = {'width_ratios':width_ratios} if scale_widths else None plt.close() - f,ax = plt.subplots(1, ncols, sharey=True, figsize=(max(10,ncols*1.2),3)) + f,ax = plt.subplots( + 1, ncols, sharey=True, figsize=figsize, + gridspec_kw=gridspec_kw, + ) for col, year in enumerate(years): df = dfplot.loc[year] for region in regions: @@ -3997,6 +4176,7 @@ def plot_stressperiod_evolution(case, level='transgrp', metric='sum', threshold= ax[col].set_title(year) ax[col].xaxis.set_major_locator(mpl.ticker.MultipleLocator(1)) ax[col].axhline(threshold, lw=0.75, ls='--', c='0.7', zorder=-1e6) + ax[col].set_xlim(-0.25, len(df)-0.75) ## Annotate the stress periods periods = dfstress.loc[year].drop_duplicates() note = '\n___________\n'.join([ @@ -4109,10 +4289,76 @@ def plot_neue_bylevel( return f, ax, dfin_neue +def map_neue( + case, year=2050, iteration='last', samples=None, metric='sum', + vmax=10., cmap=cmocean.cm.rain, label=True, + ): + """ + """ + ### Parse inputs + assert metric in ['sum','max'] + + ### Get data + if iteration == 'last': + _, _iteration = get_last_iteration( + case=case, year=year, samples=samples) + else: + _iteration = iteration + infile = os.path.join(case, 'outputs', f'neue_{year}i{_iteration}.csv') + neue = pd.read_csv(infile) + neue = neue.loc[neue.metric==metric].set_index(['level','region']).NEUE_ppm + + ### Set up plot + levels = ['interconnect','nercr','transreg','transgrp','st','r'] + nrows, ncols, coords = plots.get_coordinates(levels, aspect=1.3) + dfmap = get_dfmap(case) + + ### Plot it + plt.close() + f,ax = plt.subplots( + nrows, ncols, figsize=(13.33, 6.88), sharex=True, sharey=True, + gridspec_kw={'hspace':-0.05, 'wspace':-0.05}, + ) + for level in levels: + ## Background + dfmap['country'].plot( + ax=ax[coords[level]], facecolor='none', edgecolor='k', lw=1.0, zorder=1e7) + dfmap[level].plot( + ax=ax[coords[level]], facecolor='none', edgecolor='k', lw=0.1, zorder=1e6) + ax[coords[level]].set_title(level, y=0.9, weight='bold') + ## Data + df = dfmap[level].copy() + df['NEUE_ppm'] = neue[level] + df.plot(ax=ax[coords[level]], column='NEUE_ppm', cmap=cmap, vmin=0, vmax=vmax) + ## Labels + # decimals = (0 if df.NEUE_ppm.max() >= 10 else 1) + decimals = (0 if level in ['st','r'] else 1) + for r, row in df.sort_values('NEUE_ppm').iterrows(): + ax[coords[level]].annotate( + f"{row.NEUE_ppm:.{decimals}f}", + [row.labelx, row.labely], + ha='center', va='center', c='k', + fontsize={'r':5}.get(level,7), + path_effects=[pe.withStroke(linewidth=1.5, foreground='w', alpha=0.7)], + ) + ### Formatting + plots.addcolorbarhist( + f=f, ax0=ax[coords[level]], data=df.NEUE_ppm, + cmap=cmap, vmin=0., vmax=vmax, histratio=0, extend='max', + cbarbottom=0.525, cbarheight=0.9, + + ) + for row in range(nrows): + for col in range(ncols): + ax[row,col].axis('off') + + return f, ax, neue, _iteration + + def map_h2_capacity( - case, year=2050, wscale_h2=3, figheight=6, pipescale=10, + case, 
year=2050, wscale_h2=10, figheight=6, pipescale=0.1, legend_kwds={'shrink':0.6, 'pad':0, 'orientation':'horizontal', 'aspect':12}, - cmap=plt.cm.gist_earth_r, + cmap=cmocean.cm.rain, ): """ H2 turbines, production (electrolyzer/SMR), pipelines, and storage @@ -4372,6 +4618,26 @@ def plot_h2_timeseries( return f, ax +def get_last_iteration(case, year=2050, datum=None, samples=None): + """Get the last iteration of PRAS for a given case/year""" + if datum not in [None,'flow','energy']: + raise ValueError(f"datum must be in [None,'flow','energy'] but is {datum}") + infile = sorted(glob( + os.path.join( + case, 'ReEDS_Augur', 'PRAS', + f"PRAS_{year}i*" + + (f'-{samples}' if samples is not None else '') + + (f'-{datum}' if datum is not None else '') + + '.h5' + ) + ))[-1] + iteration = int( + os.path.splitext(os.path.basename(infile))[0] + .split('-')[0].split('_')[1].split('i')[1] + ) + return infile, iteration + + def plot_interface_flows( case, year=2050, source='pras', iteration='last', samples=None, @@ -4387,13 +4653,8 @@ def plot_interface_flows( ).rename(columns={'*r':'r'}).set_index('r') if source.lower() == 'pras': - infile = sorted(glob( - os.path.join( - case, 'ReEDS_Augur', 'PRAS', - f"PRAS_{year}i{'*' if iteration in ['last','latest','final'] else iteration}" - + (f'-{samples}' if samples is not None else '') - + '-flow.h5') - ))[-1] + infile, _iteration = get_last_iteration( + case=case, year=year, datum='flow', samples=samples) dfflow = read_pras_results(infile).set_index(fulltimeindex) ## Filter out AC/DC converters from scenarios with VSC dfflow = dfflow[[c for c in dfflow if '"DC_' not in c]].copy() @@ -4507,7 +4768,7 @@ def plot_interface_flows( def plot_storage_soc( case, year=2050, - source='pras', iteration='last', samples=None, + source='pras', samples=None, level='transgrp', onlydata=False, ): @@ -4519,14 +4780,8 @@ def plot_storage_soc( ### Get storage state of charge if source.lower() == 'pras': - infile = sorted(glob( - os.path.join( - case, 'ReEDS_Augur', 'PRAS', - f"PRAS_{year}i{'*' if iteration in ['last','latest','final'] else iteration}" - + (f'-{samples}' if samples is not None else '') - + '-energy.h5') - ))[-1] - _iteration = int(os.path.basename(infile).split('-')[0].split('_')[1].split('i')[1]) + infile, _iteration = get_last_iteration( + case=case, year=year, datum='energy', samples=samples) dfenergy = read_pras_results(infile).set_index(fulltimeindex) else: raise NotImplementedError(f"source must be 'pras' but is '{source}'") @@ -4588,3 +4843,571 @@ def plot_storage_soc( # ) plots.despine(ax, left=False) return f, ax, dfsoc_frac + + +def get_import_export(region, df): + """""" + firsts = [c for c in df if c.startswith(region)] + lasts = [c for c in df if c.endswith(region)] + net = df[firsts].sum(axis=1) - df[lasts].sum(axis=1) + exports = -net.clip(lower=0) + imports = net.clip(upper=0).abs() + return pd.concat({'net import':imports, 'net export':exports}, axis=1) + + +def map_period_dispatch( + case, year=2050, level='transreg', + period='max load', + transmission=True, gen=True, + wscale='auto', width_total=1e5, + drawstates=0, drawzones=0, drawgrid=False, legend=True, + onlydata=False, + scale_val=100, scale_x=0.4e6, scale_y=-1.45e6, + ): + """ + Notes + * Currently only works for stress periods + """ + # ### Inputs for debugging + # case = os.path.join( + # reeds_path,'runs', + # 'v20240212_transopM0_WECC_CPNP_GP1_TFY2035_PTL2035_TRc_MITCg0p3') + # case = ( + # '/Volumes/ReEDS/Users/pbrown/ReEDSruns/20240115_transop/20240217/' + # 
'v20240217_transopK0_CPNP_GP1_TFY32_PTL32_TRc_TCa1p0_MTCg0p3' + # # 'v20240217_transopK1_CPNP_GP1_TFY35_PTL50_TRt_TCa1p15_MTCg0p0' + # ) + # year = 2050 + # level = 'transreg' + # transmission = False + # gen = True + # wscale='auto' + # width_total=1e5 + # drawstates=0. + # drawzones=0. + # drawgrid=False + # legend=True + # periods = ['max gen','max load','min solar','min wind','min vre'] + # periods = ['2009-01-15','2012-09-15'] + # onlydata = False + + ### Shared inputs + hierarchy = pd.read_csv( + os.path.join(case,'inputs_case','hierarchy.csv') + ).rename(columns={'*r':'r'}).set_index('r') + + dfba = get_zonemap(case) + dfmap = get_dfmap(case) + + ## Region centers + _dfcenter = pd.read_csv( + os.path.join(reeds_path,'postprocessing','plots','transmission-interface-coords.csv'), + index_col=['level','region1','region2'], + ).drop('drop',axis=1).loc[level].reset_index() + _dfcenter[['x','y']] *= 1e6 + dfcenter = ( + _dfcenter + .loc[~_dfcenter.index.duplicated()] + .loc[_dfcenter.region1 == _dfcenter.region2] + .rename(columns={'region1':'aggreg'}) + .drop(['region2','angle'], axis=1) + .set_index('aggreg') + ) + ## Transmission interfaces, with reverse direction + dfcorridors = pd.concat([ + _dfcenter, + _dfcenter.assign(region1=_dfcenter.region2).assign(region2=_dfcenter.region1) + ], axis=0).set_index(['region1','region2']) + + dfcorridors = dfcorridors.loc[~dfcorridors.index.duplicated()].copy() + + ## Colors + bokehcolors = pd.read_csv( + os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_style.csv'), + index_col='order').squeeze(1) + + # tech_map = pd.read_csv( + # os.path.join(reeds_path,'postprocessing','bokehpivot','in','reeds2','tech_map.csv'), + # index_col='raw').squeeze(1) + + bokehcolors = pd.concat([ + bokehcolors.loc['smr':'electrolyzer'], + pd.Series('#D55E00', index=['dac'], name='color'), + bokehcolors.loc[:'Canada'], + ]) + + bokehcolors['canada'] = bokehcolors['Canada'] + bokehcolors['net import'] = '0.7' + bokehcolors = pd.concat([pd.Series({'net export':'0.7'}, name='color'), bokehcolors]) + + ### Parse and check inputs + regions = hierarchy[level].unique() + if level != 'transreg': + raise NotImplementedError(f"level={level} but only level='transreg' works") + if len(dfcenter.drop_duplicates('x')) != len(dfcenter): + raise ValueError('Use unique x values in transmissioninterface-coords.csv') + + ### Get outputs + gen_h_stress = pd.read_csv( + os.path.join(case,'outputs','gen_h_stress.csv'), + header=0, names=['i','r','h','t','GW'], + ) + gen_h_stress.GW /= 1000 + gen_h_stress.i = simplify_techs(gen_h_stress.i) + + ## Aggregate transmission flows + tran_flow_stress = pd.read_csv( + os.path.join(case,'outputs','tran_flow_stress.csv'), + ).rename(columns={'Value':'GW', 'allh':'h'}) + tran_flow_stress.GW /= 1e3 + + tran_flow_stress['aggreg'] = tran_flow_stress.r.map(hierarchy[level]) + tran_flow_stress['aggregg'] = tran_flow_stress.rr.map(hierarchy[level]) + tran_flow_stress['interface'] = tran_flow_stress.aggreg + '|' + tran_flow_stress.aggregg + + tran_flow_stress_agg = ( + tran_flow_stress + .loc[tran_flow_stress.aggreg != tran_flow_stress.aggregg] + .groupby(['t','h','interface']).GW.sum().unstack('interface').fillna(0) + ) + + ## Load + load_stress = pd.read_csv( + os.path.join(case,'outputs','load_stress.csv'), + ).rename(columns={'Value':'GW', 'allh':'h'}) + load_stress.GW /= 1e3 + dfload_allperiods = ( + load_stress.assign(region=load_stress.r.map(hierarchy[level])) + .groupby(['t','region','h']).GW.sum() + 
.loc[year].unstack('region') + ) + dfload_allperiods['szn'] = dfload_allperiods.index.map(lambda x: x.split('h')[0]) + dfload_allperiods = dfload_allperiods.reset_index().set_index(['szn','h']) + + sw = pd.read_csv( + os.path.join(case, 'inputs_case', 'switches.csv'), header=None, index_col=0).squeeze(1) + numsteps = 24 // int(sw['GSw_HourlyChunkLengthStress']) * (1 if sw['GSw_HourlyType'] == 'day' else 5) + + ### Map dispatch to level + gen_h_stress['aggreg'] = gen_h_stress.r.map(hierarchy[level]) + gen_h_stress_agg = ( + gen_h_stress.loc[gen_h_stress.t==year] + .groupby(['aggreg','i','h'], as_index=False).GW.sum() + ) + gen_h_stress_agg['szn'] = gen_h_stress_agg.h.str.split('h', expand=True)[0] + gen_h_stress_agg = gen_h_stress_agg.set_index(['szn','aggreg','h','i']).GW.unstack(['i']).fillna(0) + + ### Keep a single day + if period == 'max gen': + top_periods = gen_h_stress_agg.groupby('szn').sum().sum(axis=1).sort_values(ascending=False) + keep_period = top_periods.index[0] + elif period == 'max load': + keep_period = dfload_allperiods.sum(axis=1).groupby('szn').max().nlargest(1).index[0] + elif period == 'min solar': + keep_period = ( + gen_h_stress_agg + [[c for c in gen_h_stress_agg if (('pv' in c) or ('csp' in c))]].sum(axis=1) + .groupby('szn').sum() + .nsmallest(1) + .index[0] + ) + elif period == 'min wind': + keep_period = ( + gen_h_stress_agg + [[c for c in gen_h_stress_agg if ('wind' in c)]].sum(axis=1) + .groupby('szn').sum() + .nsmallest(1) + .index[0] + ) + elif period == 'min vre': + keep_period = ( + gen_h_stress_agg + [[c for c in gen_h_stress_agg if (('pv' in c) or ('csp' in c) or ('wind' in c))]].sum(axis=1) + .groupby('szn').sum() + .nsmallest(1) + .index[0] + ) + ## Otherwise we're dealing with timestamps + else: + if period.startswith('sy'): + keep_period = period + else: + timestamp = pd.Timestamp(period) + keep_period = 's' + timestamp2h(timestamp).split('h')[0] + + hours = sorted(list(set(sorted([i for i in gen_h_stress.h if i.startswith(keep_period)])))) + dfgen = gen_h_stress_agg.loc[keep_period].reindex(bokehcolors.index, axis=1).dropna(axis=1, how='all') + dfload = dfload_allperiods.loc[keep_period].copy() + + ### Add imports and exports + if len(tran_flow_stress_agg): + dfimportexport = {} + for region in regions: + dfimportexport[region] = get_import_export( + region=region, df=tran_flow_stress_agg.loc[year].loc[hours] + ) + dfimportexport = pd.concat(dfimportexport) + dfgen = pd.concat([dfgen, dfimportexport], axis=1) + + ### Get transmission flows from westmost to eastmost region of each interface + if transmission: + dftrans = tran_flow_stress_agg.loc[year].loc[hours].copy() + regions_sorted = dfcenter.sort_values('x').index.tolist() + for interface in dftrans: + r1, r2 = interface.split('|') + if regions_sorted.index(r2) < regions_sorted.index(r1): + dftrans[interface] *= -1 + dftrans = dftrans.rename(columns={interface:f'{r2}|{r1}'}) + dftrans = dftrans.groupby(axis=1, level='interface').sum() + else: + dftrans = pd.DataFrame() + + out = {'gen':dfgen, 'load':dfload, 'trans':dftrans} + if onlydata: + return out + + ### Plot settings + width_step = width_total / numsteps * 2 + if wscale in ['auto','scale','default',None,'','max']: + capmax = dfgen.groupby('h').sum().sum(axis=1).max() + _wscale = 3e6 / capmax * 0.95 + else: + _wscale = wscale + + ###### Plot it + plt.close() + f,ax = plt.subplots(figsize=(12,8)) + ### Plot background + dfmap[level].plot(ax=ax, facecolor='none', edgecolor='k', lw=1.) 
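+    ## Optional boundary overlays: the drawstates/drawzones arguments also set the line widths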
+ if drawstates: + dfmap['st'].plot(ax=ax, facecolor='none', edgecolor='0.5', lw=drawstates) + if drawzones: + dfba.plot(ax=ax, facecolor='none', edgecolor='0.5', lw=drawzones) + + if gen: + for region in regions: + x, y = dfcenter.loc[region] + ### Dispatch stack + _dfgen = dfgen.loc[region] * _wscale + _dfgen.index = np.linspace(-1,1,numsteps+1)[:numsteps] * width_total + y -= _dfgen.sum(axis=1).max() / 2 + plots.stackbar( + df=_dfgen, + ax=ax, colors=bokehcolors, width=width_step, net=False, + bottom=y, x0=x, + ) + ax.plot( + x + _dfgen.index.values * (1 + width_step / width_total / 2), + [y]*len(_dfgen), + c='k', ls=':', lw=0.75) + ### Demand + _dfload = dfload[region] * _wscale + _dfload.index = _dfgen.index + x + for i, (x, val) in enumerate(_dfload.items()): + ax.plot( + [x-width_step/2*0.85, x+width_step/2*0.85], [val+y]*2, + c='k', ls='-', lw=1, + solid_capstyle='butt', + ) + + if transmission: + for interface in dftrans: + r1, r2 = interface.split('|') + x, y , angle = dfcorridors.loc[(r1, r2)] + df = dftrans[interface] * _wscale + df.index = np.linspace(-1,1,numsteps+1)[:numsteps] * width_total + plots.stackbar( + df=df.rename('trans').to_frame(), + ax=ax, colors={'trans':'0.7'}, width=width_step, net=False, + bottom=y, x0=x, + ) + ax.plot( + x + df.index.values * (1 + width_step / width_total / 2), + [y]*len(df), + c='k', ls=':', lw=0.75) + + ###### Legend + if legend: + handles = [ + mpl.patches.Patch(facecolor=bokehcolors[i], edgecolor='none', label=i) + for i in bokehcolors.index if i in dfgen.columns + ] + _leg = ax.legend( + handles=handles[::-1], loc='lower left', fontsize=9, ncol=1, frameon=False, + bbox_to_anchor=(0.9,0.02), + handletextpad=0.3, handlelength=0.7, columnspacing=0.5, + ) + ### Scale + if scale_val: + bottom = scale_y - (scale_val * _wscale / 2) + ## -1.3e6, -1.3e6 + ax.bar( + x=[scale_x], height=[scale_val * _wscale], bottom=[bottom], + color='k', width=width_step, + ) + ax.annotate( + # f"↕{scale_val} GW\n↔{sw.GSw_HourlyChunkLengthStress} hours", + f"↕{scale_val} GW", + (scale_x+0.04e6, scale_y), va='center', fontsize=16, + ) + ax.annotate( + f"↔ {sw.GSw_HourlyChunkLengthStress} hours", + (scale_x-0.06e6, bottom), va='top', fontsize=16, + ) + + ###### Formatting + ax.axis('off') + timestring = h2timestamp(keep_period+'h01').strftime('%Y-%m-%d') + ax.annotate( + (timestring if period == timestring else f"{period}\n{timestring}"), + (0.2,0.2), xycoords='axes fraction', ha='right', va='top', fontsize=18, + ) + ### Grid + if drawgrid: + ax.xaxis.set_major_locator(mpl.ticker.MultipleLocator(0.5e6)) + ax.xaxis.set_minor_locator(mpl.ticker.MultipleLocator(0.1e6)) + ax.yaxis.set_minor_locator(mpl.ticker.MultipleLocator(0.1e6)) + ax.grid(which='major', ls=':', lw=0.75) + ax.grid(which='minor', ls=':', lw=0.25) + ax.axis('on') + + return f, ax, out + + +def plot_pras_eue_timeseries_full( + case, year=2050, iteration='last', samples=None, level='transgrp', ymax=None, + figsize=(6, 5)): + """ + Dropped load timeseries + """ + if iteration == 'last': + _, _iteration = get_last_iteration( + case=case, year=year, samples=samples) + else: + _iteration = iteration + infile = os.path.join( + case, 'ReEDS_Augur', 'PRAS', + f"PRAS_{year}i{_iteration}" + (f'-{samples}' if samples is not None else '') + '.h5' + ) + dfpras = functions.read_pras_results(infile) + dfpras.index = functions.make_fulltimeindex() + ## Only keep EUE + dfpras = dfpras[[ + c for c in dfpras if (c.endswith('EUE') and not c.lower().startswith('usa')) + ]].copy() + + ### Sum by hierarchy level + hierarchy 
= pd.read_csv( + os.path.join(case,'inputs_case','hierarchy.csv') + ).rename(columns={'*r':'r'}).set_index('r') + dfpras_agg = ( + dfpras + .rename(columns={c: c[:-len('_EUE')] for c in dfpras}) + .rename(columns=hierarchy[level]) + .groupby(axis=1, level=0).sum() + / 1e3 + ) + + wys = range(2007,2014) + colors = plots.rainbowmapper(dfpras_agg.columns) + + ### Plot it + plt.close() + f,ax = plt.subplots(len(wys), 1, sharex=False, sharey=True, figsize=figsize) + for row, y in enumerate(wys): + timeindex_y = pd.date_range( + f"{y}-01-01", f"{y+1}-01-01", inclusive='left', freq='H', + tz='EST')[:8760] + for region, color in colors.items(): + ax[row].fill_between( + timeindex_y, dfpras_agg.loc[str(y),region:].sum(axis=1).values, + lw=1, color=color, label=region) + ### Formatting + ax[row].annotate( + y, (0.01,1), xycoords='axes fraction', + fontsize=14, weight='bold', va='top') + ax[row].set_xlim( + pd.Timestamp(f"{y}-01-01 00:00-05:00"), + pd.Timestamp(f"{y}-12-31 23:59-05:00")) + ax[row].xaxis.set_major_locator(mpl.dates.MonthLocator(tz='EST')) + ax[row].xaxis.set_minor_locator(mpl.dates.WeekdayLocator(byweekday=mpl.dates.SU)) + if row == len(wys) - 1: + ax[row].xaxis.set_major_formatter(mpl.dates.DateFormatter('%b')) + else: + ax[row].set_xticklabels([]) + ax[0].legend( + loc='upper left', bbox_to_anchor=(1,1), + frameon=False, columnspacing=0.5, handlelength=0.7, handletextpad=0.3, + ) + ax[0].set_ylim(0, ymax) + ax[len(wys)-1].set_ylabel('Expected unserved energy [GWh/h]', y=0, ha='left') + plots.despine(ax) + return f, ax, dfpras, _iteration + + +def plot_seed_stressperiods( + case, cmap=cmocean.cm.phase, startfrom=200, + alpha=0.7, fontsize=5, pealpha=0.8, pelinewidth=1.5, +): + """ + """ + sw = pd.read_csv( + os.path.join(case,'inputs_case','switches.csv'), + header=None, index_col=0, + ).squeeze(1) + + hierarchy = pd.read_csv( + os.path.join(case,'inputs_case','hierarchy.csv') + ).rename(columns={'*r':'r'}).set_index('r') + dfmap = get_dfmap(case) + + years = pd.read_csv( + os.path.join(os.path.join(case,'inputs_case','modeledyears.csv')) + ).columns.astype(int).values + years = [y for y in years if y >= int(sw.GSw_StartMarkets)] + + ### Get seed Stress periods + dictin_seed = {} + for year in years: + dictin_seed[year] = pd.read_csv( + os.path.join(case, 'inputs_case', f'stress{year}i0', 'forceperiods.csv') + ) + dictin_seed[year]['timestamp'] = dictin_seed[year].szn.map(h2timestamp) + dictin_seed[year]['date'] = dictin_seed[year].timestamp.map(lambda x: x.strftime('%Y-%m-%d')) + dictin_seed[year]['monthday'] = dictin_seed[year].timestamp.map(lambda x: x.strftime('%m-%d')) + + days_of_year = pd.date_range('2004-01-01','2004-12-31',freq='D') + monthdays = days_of_year.strftime('%m-%d') + + ### Recalculate peak load days since we dropped duplicates above + load_allyears = hourly_repperiods.get_load( + os.path.join(case, 'inputs_case'), + keep_weatheryears='all').loc[years] + timestamps = pd.read_csv(os.path.join(case,'inputs_case','timestamps.csv')) + ## Add descriptive index + load_allyears.index = ( + pd.concat( + {y: timestamps for y in years}, + axis=0, names=['modelyear','h_of_modelyear']).reset_index() + .set_index(['modelyear','year','yperiod','h_of_period']).index + ) + stressperiods_load = { + y: hourly_repperiods.identify_peak_containing_periods( + df=load_allyears.loc[y], hierarchy=hierarchy, + level=sw['GSw_PRM_StressSeedLoadLevel']) + for y in years + } + + ### Put cold colors in winter + colorvals = ( + list(np.linspace(0,1,len(monthdays))[startfrom:]) + + 
list(np.linspace(0,1,len(monthdays))[:startfrom]) + ) + monthday2val = dict(zip(monthdays, colorvals)) + + # ### Test it + # step = 5 + # plt.close() + # f,ax = plt.subplots(figsize=(12,3)) + # for x, monthday in enumerate(monthdays[::step]): + # color = cmap(monthday2val[monthday]) + # ax.plot([x], [0], color=color, marker='o') + # ax.annotate(monthday, (x, 0.015), rotation=90, ha='center', va='center', color=color) + # ax.set_title(startfrom) + # plots.despine(ax) + # plt.show() + + + ### Plot it + ncols = 3 + nrows = len(years) // ncols + 1 + loadcoords = dict(zip(years, [(row+1,col) for row in range(nrows) for col in range(ncols)])) + minrecoords = { + 'upv': (0, 1), + 'wind-ons': (0, 2) + } + title = {'upv':'Solar','wind-ons':'Wind'} + + plt.close() + f,ax = plt.subplots( + nrows, ncols, figsize=(2.9*ncols, 2*nrows), + gridspec_kw={'hspace':0, 'wspace':0}, + ) + ### Min RE + for tech, (row, col) in minrecoords.items(): + ax[row,col].set_title(f'Min {title[tech]}', va='top', y=0.95, fontsize=11) + df = dfmap[sw.GSw_PRM_StressSeedMinRElevel].copy() + dates = ( + dictin_seed[years[0]].loc[dictin_seed[years[0]].property==tech] + .set_index('region') + ) + df['date'] = dates.date + df['monthday'] = dates.monthday + df['val'] = df.monthday.map(monthday2val) + df.plot( + ax=ax[row,col], facecolor='none', edgecolor='k', lw=0.2, zorder=1e6, + ) + df.plot( + ax=ax[row,col], column='val', edgecolor='none', lw=0, cmap=cmap, alpha=alpha, + vmin=0, vmax=1, + ) + for i, _row in df.iterrows(): + ax[row,col].annotate( + _row.date, (_row.labelx, _row.labely), + ha='center', va='center', color='k', fontsize=fontsize, + path_effects=[pe.withStroke(linewidth=pelinewidth, foreground='w', alpha=pealpha)], + ) + + + ### Max load + for year in years: + row, col = loadcoords[year] + ax[row,col].set_title(f'Peak load {year}', va='top', y=0.95, fontsize=11) + df = dfmap[sw.GSw_PRM_StressSeedLoadLevel].copy() + dates = ( + dictin_seed[year].loc[dictin_seed[year].property=='load'] + .set_index('region') + ) + ## Look up missing values + lookup_days = pd.DataFrame(list(stressperiods_load[year])).set_index(0) + lookup_days['szn'] = 'y' + lookup_days[2].astype(str) + 'd' + lookup_days[3].map(lambda x: f'{x:0>3}') + lookup_days['timestamp'] = lookup_days.szn.map(h2timestamp) + lookup_days['date'] = lookup_days.timestamp.map(lambda x: x.strftime('%Y-%m-%d')) + lookup_days['monthday'] = lookup_days.timestamp.map(lambda x: x.strftime('%m-%d')) + + df = df.merge(lookup_days[['date','monthday']], left_index=True, right_index=True) + # df['date'] = dates.date + # df['monthday'] = dates.monthday + df['val'] = df.monthday.map(monthday2val) + + df.plot( + ax=ax[row,col], facecolor='none', edgecolor='k', lw=0.2, + ) + df.plot( + ax=ax[row,col], column='val', edgecolor='none', lw=0, cmap=cmap, alpha=alpha, + vmin=0, vmax=1, + ) + for i, _row in df.iterrows(): + ax[row,col].annotate( + _row.date, (_row.labelx, _row.labely), + ha='center', va='center', color='k', fontsize=fontsize, + path_effects=[pe.withStroke(linewidth=pelinewidth, foreground='w', alpha=pealpha)], + ) + + ### Colorbar + row, col = 0, 0 + for x, monthday in enumerate(monthdays): + color = cmap(monthday2val[monthday]) + ax[row,col].plot([x], [0], color=color, marker='|', lw=0.1) + if monthday.endswith('15'): + ax[row,col].annotate( + days_of_year[x].strftime('%b'), + # monthday, + (x, 0.008), rotation=90, ha='center', va='bottom', color=color) + + ### Formatting + for row in range(nrows): + for col in range(ncols): + ax[row,col].axis('off') + + return f, ax diff 
--git a/postprocessing/transmission_maps.py b/postprocessing/transmission_maps.py index 2343d86..b01d987 100644 --- a/postprocessing/transmission_maps.py +++ b/postprocessing/transmission_maps.py @@ -5,6 +5,7 @@ import site import argparse import traceback +import cmocean os.environ['PROJ_NETWORK'] = 'OFF' @@ -22,15 +23,13 @@ ## need to be reduced to avoid too much overlap in the plotted routes wscale_straight = 0.0004 wscale_routes = 1.5 -wscale_h2 = 3 +wscale_h2 = 10 routes = False ## Note that if you change the CRS you'll probably need to change ## the position of the annotations crs = 'ESRI:102008' ### For generation capacity map -cmap = plt.cm.gist_earth_r -# cmap = plt.cm.YlGnBu -# cmap = plt.cm.PuBuGn +cmap = cmocean.cm.rain ncols = 4 techs = [ 'Utility PV', 'Land-based wind', 'Offshore wind', 'Electrolyzer', @@ -81,7 +80,7 @@ ################### #%% ARGUMENT INPUTS parser = argparse.ArgumentParser(description='transmission maps') -parser.add_argument('--casedir', '-c', type=str, +parser.add_argument('--case', '-c', type=str, help='path to ReEDS run folder') parser.add_argument('--year', '-y', type=int, default=2050, help='year to plot') @@ -89,16 +88,16 @@ help='if True, show actual transmission routes') args = parser.parse_args() -casedir = args.casedir +case = args.case year = args.year routes = args.routes # #%% Inputs for testing -# casedir = ( -# '/Volumes/ReEDS/Users/pbrown/ReEDSruns/20240112_stresspaper/20240112/' -# 'v20240112_stresspaperE0_SP_DemHi_90by2035__core') -# casedir = os.path.expanduser('~/github/ReEDS-2.0/runs/v20240218_stressstorM0_Z45_SP_5yr_H0_Southwest') -# year = 2035 +# case = os.path.expanduser('~/github/ReEDS-2.0/runs/v20240218_stressstorM0_Z45_SP_5yr_H0_Southwest') +# case = ( +# '/Volumes/ReEDS/Users/pbrown/ReEDSruns/20240112_stresspaper/20240313/' +# 'v20240313_stresspaperE0_SP_DemHi_90by2035__core') +# year = 2050 # routes = False # interactive = True # write = False @@ -108,10 +107,10 @@ ############# #%% PROCEDURE #%% Set up logger -log = makelog(scriptname=__file__, logpath=os.path.join(casedir,'gamslog.txt')) +log = makelog(scriptname=__file__, logpath=os.path.join(case,'gamslog.txt')) #%% Make output directory -savepath = os.path.join(casedir,'outputs','maps') +savepath = os.path.join(case,'outputs','maps') os.makedirs(savepath, exist_ok=True) #%% Load colors @@ -125,22 +124,22 @@ #%% Load switches sw = pd.read_csv( - os.path.join(casedir, 'inputs_case', 'switches.csv'), + os.path.join(case, 'inputs_case', 'switches.csv'), header=None, index_col=0).squeeze(1) years = pd.read_csv( - os.path.join(casedir,'inputs_case','modeledyears.csv') + os.path.join(case,'inputs_case','modeledyears.csv') ).columns.astype(int).values yearstep = years[-1] - years[-2] val_r = pd.read_csv( - os.path.join(casedir, 'inputs_case', 'val_r.csv'), header=None).squeeze(1).tolist() + os.path.join(case, 'inputs_case', 'val_r.csv'), header=None).squeeze(1).tolist() #%% Transmission line map with disaggregated transmission types ### Plot both total capacity (subtract_baseyear=None) and new (subtract_baseyear=2020) for subtract_baseyear in [None, 2020]: try: plt.close() - f,ax = rplots.plot_trans_onecase( - case=casedir, pcalabel=False, + f, ax, _ = rplots.plot_trans_onecase( + case=case, pcalabel=False, routes=routes, simpletypes=None, wscale=(wscale_routes if routes else wscale_straight), yearlabel=False, year=year, alpha=1.0, @@ -179,8 +178,8 @@ try: for plottype in ['mean','max']: plt.close() - f,ax = rplots.plot_transmission_utilization( - case=casedir, year=year, 
plottype=plottype, + f, ax, df = rplots.plot_transmission_utilization( + case=case, year=year, plottype=plottype, wscale=wscale_straight, alpha=1.0, cmap=cmap, ) savename = f'map_transmission_utilization-{plottype}-{year}' @@ -197,7 +196,7 @@ try: plt.close() f,ax = rplots.plot_average_flow( - case=casedir, year=year, wscale=wscale_routes*8e3, + case=case, year=year, wscale=wscale_routes*8e3, ) savename = f'map_transmission_utilization-flowdirection-{year}.png' if write: @@ -213,7 +212,7 @@ try: plt.close() f,ax = rplots.plot_prmtrade( - case=casedir, year=year, wscale=wscale_straight*8e3, + case=case, year=year, wscale=wscale_straight*8e3, ) savename = f'map_transmission_utilization-prmtrade-{year}.png' if write: @@ -232,7 +231,7 @@ if int(sw.GSw_VSC): plt.close() f,ax = rplots.plot_trans_vsc( - case=casedir, year=year, wscale=wscale_straight*1e3, + case=case, year=year, wscale=wscale_straight*1e3, alpha=1.0, miles=300, ) savename = f'map_translines_vsc-{year}.png' @@ -251,11 +250,11 @@ ### Plot with tech-specific (vmax='each') and uniform (vmax='shared') color axis for vmax in ['each', 'shared']: try: - dfba = rplots.get_zonemap(casedir).loc[val_r] + dfba = rplots.get_zonemap(case).loc[val_r] dfstates = dfba.dissolve('st') ### Case data dfcap = pd.read_csv( - os.path.join(casedir,'outputs','cap.csv'), + os.path.join(case,'outputs','cap.csv'), names=['i','r','t','MW'], header=0, ) dfcap.i = dfcap.i.str.lower().map(lambda x: techmap.get(x,x)) @@ -301,7 +300,7 @@ ) ax[coords[tech]].axis('off') ax[0,0].set_title( - '{} ({})'.format(os.path.basename(casedir), year), + '{} ({})'.format(os.path.basename(case), year), x=0.1, ha='left', va='top') savename = f'map_capacity-{year}-{vmax}.png' if write: @@ -318,7 +317,7 @@ try: plt.close() f,ax = rplots.plot_vresites_transmission( - casedir, year, crs=crs, cm=gen_cmap, + case, year, crs=crs, cm=gen_cmap, routes=False, wscale=wscale_straight, show_overlap=False, subtract_baseyear=None, show_transmission=False, alpha=transalpha, colors=transcolor, ms=ms, @@ -339,7 +338,7 @@ try: plt.close() f,ax = rplots.plot_vresites_transmission( - casedir, year, crs=crs, cm=gen_cmap, + case, year, crs=crs, cm=gen_cmap, routes=routes, show_overlap=False, wscale=wscale_routes, subtract_baseyear=None, show_transmission=True, @@ -360,7 +359,7 @@ try: for val in ['cap','gen']: plt.close() - f,ax = rplots.map_agg(case=casedir, data=val, width_step=yearstep) + f,ax = rplots.map_agg(case=case, data=val, width_step=yearstep) savename = f'map_agg-FERC-{val}-{year}.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -370,7 +369,7 @@ print(savename) plt.close() - f,ax = rplots.map_trans_agg(case=casedir, wscale=1000, drawzones=0.05) + f,ax = rplots.map_trans_agg(case=case, wscale=1000, drawzones=0.05, width_step=yearstep) savename = f'map_agg-FERC-trans-{year}.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -380,7 +379,7 @@ print(savename) plt.close() - f,ax = rplots.map_agg(case=casedir, data='cap', width_step=yearstep, transmission=True) + f,ax = rplots.map_agg(case=case, data='cap', width_step=yearstep, transmission=True) savename = f'map_agg-FERC-cap,trans-{year}.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -404,7 +403,7 @@ for v in [0,1]: plt.close() f,ax = rplots.plot_dispatch_yearbymonth( - case=casedir, t=year, highlight_rep_periods=v, techs=techs) + case=case, t=year, highlight_rep_periods=v, techs=techs) endname = '' if not techs else f"-{','.join(techs)}" savename = f'plot_dispatch-yearbymonth-{v}-{year}{endname}.png' 
             if write:
@@ -419,7 +418,7 @@
 try:
     plt.close()
-    f,ax = rplots.plot_dispatch_weightwidth(case=casedir)
+    f,ax = rplots.plot_dispatch_weightwidth(case=case)
     savename = f'plot_dispatch-weightwidth-{sw.endyear}.png'
     if write:
         plt.savefig(os.path.join(savepath, savename))
@@ -436,7 +435,7 @@
 try:
     for sideplots in [False, True]:
         plt.close()
-        f,ax,eax = rplots.map_zone_capacity(case=casedir, year=year, sideplots=sideplots)
+        f,ax,eax = rplots.map_zone_capacity(case=case, year=year, sideplots=sideplots)
         savename = f'map_gencap_transcap-{year}{"-sideplots" if sideplots else ""}.png'
         if write:
             plt.savefig(os.path.join(savepath, savename))
@@ -452,7 +451,7 @@
 #%% Interregional transmission / peak demand
 try:
     for level in ['transreg']:
-        f, ax, dfplot = rplots.plot_interreg_transfer_cap_ratio(case=casedir, level=level)
+        f, ax, dfplot = rplots.plot_interreg_transfer_cap_ratio(case=case, level=level)
         savename = f'plot_interreg_transfer_ratio-{level}.png'
         if write:
             plt.savefig(os.path.join(savepath, savename))
@@ -468,7 +467,7 @@
 #%% Differences betweens solve years
 try:
     plt.close()
-    f,ax = rplots.plot_retire_add(case=casedir)
+    f,ax = rplots.plot_retire_add(case=case)
     savename = 'bars_retirements_additions.png'
     if write:
         plt.savefig(os.path.join(savepath, savename))
@@ -486,7 +485,7 @@
 if int(sw.GSw_H2):
     plt.close()
     f,ax = rplots.map_h2_capacity(
-        case=casedir, year=year, cmap=plt.cm.gist_earth_r, wscale_h2=0.2)
+        case=case, year=year, cmap=cmap, wscale_h2=wscale_h2)
     savename = f'map_h2_capacity-{sw.endyear}.png'
     if write:
         plt.savefig(os.path.join(savepath, savename))
@@ -505,7 +504,7 @@
     for plottype in ['mean','max']:
         plt.close()
         f,ax = rplots.plot_transmission_utilization(
-            case=casedir, year=year, plottype=plottype, network='h2',
+            case=case, year=year, plottype=plottype, network='h2',
             wscale=wscale_h2/1000, alpha=1.0, cmap=cmap, extent='modeled',
         )
         savename = f'map_pipeline_utilization-{plottype}-{year}.png'
@@ -523,7 +522,7 @@
 if int(sw['GSw_H2_Transport']):
     plt.close()
     f,ax = rplots.plot_average_flow(
-        case=casedir, year=year, network='h2',
+        case=case, year=year, network='h2',
         cm=plt.cm.magma_r, extent='modeled', wscale=wscale_h2*1e4,
     )
     savename = f'map_pipeline_utilization-flowdirection-{year}.png'
@@ -545,7 +544,7 @@
     agglevel = ('r' if len(val_r) <= 20 else ('st' if len(val_r) <= 30 else 'transreg'))
     plt.close()
     f, ax = rplots.plot_h2_timeseries(
-        case=casedir, year=year, agglevel=agglevel, grid=0)
+        case=case, year=year, agglevel=agglevel, grid=0)
     savename = f'plot_h2_timeseries-{year}.png'
     if write:
         plt.savefig(os.path.join(savepath, savename))
@@ -559,11 +558,25 @@
 #%% Stress periods
-if not int(sw.GSw_PRM_CapCredit):
+if (not int(sw.GSw_PRM_CapCredit)) or (int(sw.pras) == 2):
+    try:
+        plt.close()
+        f,ax = rplots.plot_seed_stressperiods(case=case)
+        savename = 'map_stressperiod_seeds.png'
+        if write:
+            plt.savefig(os.path.join(savepath, savename))
+        if interactive:
+            plt.show()
+        plt.close()
+        print(savename)
+    except Exception:
+        print('map_stressperiod_seeds failed:')
+        print(traceback.format_exc())
+
     try:
         plt.close()
        level, regions = 'country', ['USA']
-        f,ax = rplots.plot_stressperiod_dispatch(case=casedir, level=level, regions=regions)
+        f,ax = rplots.plot_stressperiod_dispatch(case=case, level=level, regions=regions)
         savename = f'plot_stressperiod_dispatch-{",".join(regions)}.png'
         if write:
             plt.savefig(os.path.join(savepath, savename))
@@ -577,7 +590,7 @@
     try:
         plt.close()
-        f,ax = rplots.plot_stressperiod_days(case=casedir)
+        f,ax = rplots.plot_stressperiod_days(case=case,
repcolor='none', sharey=True) savename = 'plot_stressperiod_dates.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -594,7 +607,7 @@ level = 'transgrp' plt.close() f,ax = rplots.plot_stressperiod_evolution( - case=casedir, level=level, metric=metric) + case=case, level=level, metric=metric) savename = f'plot_stressperiod_evolution-{metric}-{level}.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -609,7 +622,7 @@ try: plt.close() levels = ['country','interconnect','transreg','transgrp'] - f, ax, _ = rplots.plot_neue_bylevel(case=casedir, levels=levels) + f, ax, _ = rplots.plot_neue_bylevel(case=case, levels=levels) savename = f"plot_stressperiod_neue-{','.join(levels)}.png" if write: plt.savefig(os.path.join(savepath, savename)) @@ -621,9 +634,27 @@ print('plot_stressperiod_neue failed:') print(traceback.format_exc()) + try: + level = 'transreg' + periods = ['max gen','max load','min solar','min wind','min vre'] + for period in periods: + plt.close() + f, ax, _ = rplots.map_period_dispatch( + case=case, year=year, level=level, period=period, transmission=False, + ) + savename = f"map_dispatch_stressperiod-{level}-{year}-{period.replace(' ','')}.png" + if write: + plt.savefig(os.path.join(savepath, savename)) + if interactive: + plt.show() + plt.close() + print(savename) + except Exception: + print('map_period_dispatch failed:') + try: plt.close() - f, ax, _ = rplots.plot_interface_flows(case=casedir, year=year) + f, ax, _ = rplots.plot_interface_flows(case=case, year=year) savename = f'plot_PRAS_flows-{year}.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -637,7 +668,7 @@ try: plt.close() - f, ax, _ = rplots.plot_storage_soc(case=casedir, year=year) + f, ax, _ = rplots.plot_storage_soc(case=case, year=year) savename = f'plot_PRAS_storage-{year}.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -649,6 +680,39 @@ print('plot_PRAS_storage failed:') print(traceback.format_exc()) + try: + level = 'transgrp' + iteration = 'last' + plt.close() + f, ax, df, i = rplots.plot_pras_eue_timeseries_full( + case=case, year=year, level=level, iteration=iteration) + savename = f'plot_PRAS_EUE-{level}-{year}i{i}.png' + if write: + plt.savefig(os.path.join(savepath, savename)) + if interactive: + plt.show() + plt.close() + print(savename) + except Exception: + print('plot_pras_eue_timeseries_full failed:') + print(traceback.format_exc()) + + try: + for y in [y for y in years if y >= 2025]: + plt.close() + # f, ax, neue, _iteration = rplots.map_neue(case=case, year=y, iteration=0) + f, ax, neue, _iteration = rplots.map_neue(case=case, year=y) + savename = f"map_PRAS_neue-{y}i{_iteration}.png" + if write: + plt.savefig(os.path.join(savepath, savename)) + if interactive: + plt.show() + plt.close() + print(savename) + except Exception: + print('map_neue failed:') + print(traceback.format_exc()) + #%% Capacity markers try: @@ -657,7 +721,7 @@ for level in ['r','st']: plt.close() f,ax = rplots.map_capacity_markers( - case=casedir, level=level, year=year, ms=ms[level]) + case=case, level=level, year=year, ms=ms[level]) savename = f'map_units-gencap-{level}.png' if write: plt.savefig(os.path.join(savepath, savename)) @@ -670,7 +734,7 @@ end = f'-since{subtract_baseyear}' if subtract_baseyear else '' plt.close() f,ax = rplots.map_transmission_lines( - case=casedir, level='r', year=year, subtract_baseyear=subtract_baseyear) + case=case, level='r', year=year, subtract_baseyear=subtract_baseyear) savename = f'map_units-transcap{end}.png' if write: 
             plt.savefig(os.path.join(savepath, savename))
@@ -681,8 +745,8 @@
     ### Both
     plt.close()
     f,ax = rplots.map_transmission_lines(
-        case=casedir, level='r', year=year, alpha=0.5, lw=0.15)
-    rplots.map_capacity_markers(case=casedir, level='r', year=year, f=f, ax=ax)
+        case=case, level='r', year=year, alpha=0.5, lw=0.15)
+    rplots.map_capacity_markers(case=case, level='r', year=year, f=f, ax=ax)
     savename = 'map_units-gencap-transcap.png'
     if write:
         plt.savefig(os.path.join(savepath, savename))
@@ -701,14 +765,14 @@
     for (val,tech,cmap,vmax) in [
         ('site_cap','upv',plt.cm.Oranges,400),
         ('site_cap','wind-ons',plt.cm.Blues,400),
-        ('site_hybridization',None,plt.cm.gist_earth_r,1),
+        ('site_hybridization',None,cmap,1),
         ('site_pv_fraction',None,plt.cm.turbo,1),
-        ('site_spurcap',None,plt.cm.gist_earth_r,400),
+        ('site_spurcap',None,cmap,400),
         ('site_gir','upv',plt.cm.turbo,2),
         ('site_gir','wind-ons',plt.cm.turbo,2),
     ]:
         f,ax = rplots.map_hybrid_pv_wind(
-            case=casedir,
+            case=case,
             year=year, val=val, tech=tech, cmap=cmap, vmax=vmax,
             markersize=10.75,
diff --git a/runbatch.py b/runbatch.py
index 598cb16..ca5e988 100644
--- a/runbatch.py
+++ b/runbatch.py
@@ -265,6 +265,11 @@ def check_compatibility(sw):
         )
         raise Exception(err)

+    ### Land use and reeds_to_rev
+    if (int(sw['land_use_analysis'])) and (not int(sw['reeds_to_rev'])):
+        raise ValueError(
+            "'reeds_to_rev' must be enabled for land_use_analysis to run."
+        )

 def solvestring_sequential(
     batch_case, caseSwitches,
@@ -278,7 +283,7 @@ def solvestring_sequential(
     * caseSwitches: loaded from {batch_case}/inputs_case/switches.csv
     """
     savefile = f"{batch_case}_{cur_year}i{iteration}"
-    _stress_year = f"{prev_year}i0" if stress_year is None else stress_year
+    _stress_year = f"{cur_year}i0" if stress_year is None else stress_year
     out = (
         "gams d_solveoneyear.gms"
         + (" license=gamslice.txt" if hpc else '')
@@ -976,7 +981,7 @@ def isnumeric(x):
     siteSwitches = siteSwitches.merge(binSwitches, on=['tech'], how='left')

 # Expand on reV path based on where this run is happening
-#For running hourlize on the HPC link to shared-projects folder
+# when running on the HPC this links to the shared-projects folder
 if os.environ.get('NREL_CLUSTER') == 'kestrel':
     hpc_path = '/projects/shared-projects-reeds/reeds/Supply_Curve_Data'
 else:
@@ -995,9 +1000,12 @@
     siteSwitches['rev_case'] = siteSwitches['rev_path'].apply(lambda row: os.path.basename(row))
     siteSwitches['sc_path'] = siteSwitches['sc_path'].apply(lambda row: os.path.join(rev_prefix,row))
-    siteSwitches['hpc_path'] = siteSwitches['rev_path'].apply(lambda row: os.path.join(hpc_path,row))
-    siteSwitches[['tech','access_switch','access_case','bins','rev_case',
-                  'sc_path','hpc_path','hpc_sc_file','cf_path','original_rev_folder']
+    siteSwitches['hpc_rev_path'] = siteSwitches['rev_path'].apply(lambda row: os.path.join(hpc_path,row))
+    siteSwitches['hpc_sc_file'] = siteSwitches['sc_file'].apply(lambda row: "" if pd.isnull(row) else os.path.join(hpc_path,row))
+    siteSwitches['sc_file'] = siteSwitches['sc_file'].apply(lambda row: "" if pd.isnull(row) else os.path.join(rev_prefix,row))
+
+    siteSwitches[['tech','access_switch','access_case','bins','sc_path','rev_case',
+                  'hpc_rev_path','sc_file','hpc_sc_file','cf_path','original_rev_folder']
     ].to_csv(os.path.join(inputs_case,'rev_paths.csv'), index=False)

 #%% Set up the meta.csv file to track repo information and runtime
@@ -1188,7 +1196,9 @@ def isnumeric(x):
         'hourly_repperiods',
         'aggregate_regions',
     ]:
+        OPATH.writelines(f"echo {'-'*12+'-'*len(s)}\n")
OPATH.writelines(f"echo 'starting {s}.py'\n") + OPATH.writelines(f"echo {'-'*12+'-'*len(s)}\n") OPATH.writelines( f"python {os.path.join(casedir,'input_processing',s)}.py {reeds_path} {inputs_case} {tolog}\n") OPATH.writelines(writescripterrorcheck(s)+'\n') diff --git a/runfiles.csv b/runfiles.csv index 9c79bf3..933cc53 100644 --- a/runfiles.csv +++ b/runfiles.csv @@ -16,7 +16,7 @@ acp_prices.csv,inputs/state_policies/acp_prices.csv,.csv,ignore,ignore,st,st,,1, agglevels.csv,,.csv,ignore,ignore,ignore,,,,0,,,,,,done aggreg.csv,,.csv,ignore,ignore,ignore,,,0,0,,,,,,done aggreg2anchorreg.csv,,.csv,ignore,ignore,ignore,,,0,0,,,,,,done -alpha.csv,,.csv,ignore,ignore,,,,1,0,,,,,,done +alpha.csv,inputs/fuelprices/alpha_{ngscen}.csv,.csv,ignore,ignore,wide,t,,1,0,,,,,,done bio_supplycurve.csv,inputs/supplycurvedata/bio_supplycurve.csv,.csv,ignore,ignore,usda_region,,,,0,,,,,,done bir.csv,,.csv,ignore,ignore,,,,0,0,,,,,,done can_exports.csv,inputs/canada_imports/can_exports.csv,.csv,sum,translinesize,r,wide,,1,0,,,,,,done @@ -33,7 +33,7 @@ cap_hyd_szn_adj.csv,,.csv,mean,uniform,r,"*i,szn",,0,0,,1,,,,done capnonrsc.csv,,.csv,sum,ignore,r,i,,0,0,,1,,,,‘ignore’ in disaggfunc because data will be written in county resolution by writecapdat.py cappayments_ba.csv,inputs/capacitydata/cappayments_ba.csv,.csv,ignore,ignore,,*r,,0,0,,,,,,not done but not used caprsc.csv,,.csv,sum,ignore,r,i,,0,0,,1,,,,‘ignore’ in disaggfunc because data will be written in county resolution by writecapdat.py -capture_rates.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +capture_rates.csv,inputs/carbonconstraints/capture_rates_{GSw_CCS_Rate}.csv,.csv,ignore,ignore,,,,1,0,,,,,,done ccmult.csv,,.csv,ignore,ignore,,,,,0,,,,,,done ccreg.csv,,.csv,ignore,ignore,,,,,0,,,,,,done ccs_link.csv,inputs/carbonconstraints/ccs_link.csv,.csv,ignore,ignore,,,,,0,,,,,,done @@ -42,8 +42,8 @@ cd_beta0.csv,inputs/fuelprices/cd_beta0.csv,.csv,ignore,ignore,*cendiv,,,,0,,,,, cd_beta0_allsector.csv,inputs/fuelprices/cd_beta0_allsector.csv,.csv,ignore,ignore,*cendiv,,,,0,,,,,,done cendivweights.csv,inputs/fuelprices/cendivweights.csv,.csv,mean,uniform,r_cendiv,wide,,1,0,,,,,,Includes two region definitions; ‘ignore’ in disaggfunc because already at county resolution in inputs folder ces_fraction.csv,inputs/state_policies/ces_fraction.csv,.csv,ignore,ignore,st,st,,1,0,,,,,,done -cf_vre.csv,,.csv,mean_cap,ignore,r,"*i,h",*i,0,0,,1,,,,'ignore’ in disaggfunc because data will be written in correct spatial resolution by hourly_writetimeseries in hourly_repperiods.py cf_hyd.csv,,.csv,mean,uniform,r,"*i,szn,t",,0,0,,1,,,,done +cf_vre.csv,,.csv,mean_cap,ignore,r,"*i,h",*i,0,0,,1,,,,'ignore’ in disaggfunc because data will be written in correct spatial resolution by hourly_writetimeseries in hourly_repperiods.py climate_heuristics_finalyear.csv,,.csv,ignore,ignore,,,,,0,,,,,,done climate_heuristics_yearfrac.csv,,.csv,ignore,ignore,,,,,0,,,,,,done climate_hydadjann.csv,,.csv,ignore,ignore,,r,,1,0,,,,,,not done but rarely used; ignore for now @@ -56,8 +56,10 @@ co2_cap.csv,,.csv,ignore,ignore,,,,0,0,,,,,,done co2_capture_incentive.csv,,.csv,ignore,ignore,,,,,0,,,,,,done co2_site_char.csv,inputs/ctus/co2_site_char.csv,.csv,ignore,ignore,cs,,,0,,,,,,,done co2_tax.csv,,.csv,ignore,ignore,,,,0,,,,,,,done -coal_fom_adj.csv,inputs/capacitydata/coal_fom_adj.csv,.csv,ignore,ignore,,,,0,,,,,,,done -construction_times.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +coal_fom_adj.csv,inputs/capacitydata/coal_fom_adj.csv,.csv,ignore,ignore,,,,,,,,,,,done 
+coal_price.csv,inputs/fuelprices/coal_{coalscen}.csv,.csv,ignore,ignore,wide,year,,1,0,,,,,,done +construction_schedules.csv,inputs/financials/construction_schedules_{construction_schedules_suffix}.csv,.csv,ignore,ignore,,,,1,0,,,,,,done +construction_times.csv,inputs/financials/construction_times_{construction_times_suffix}.csv,.csv,ignore,ignore,,,,,0,,,,,,done consume_char_low.csv,inputs/consume/consume_char_low.csv,.csv,ignore,ignore,,"*i,t,parameter",,0,0,,,,,,done consume_char_ref.csv,inputs/consume/consume_char_ref.csv,.csv,ignore,ignore,,"*I,t,parameter",,0,0,,,,,,done consumechardac.csv,,.csv,ignore,ignore,,"*i,t,variable",,0,0,,,,,,done @@ -68,19 +70,20 @@ cost_opres_default.csv,inputs/plant_characteristics/cost_opres_default.csv,.csv, cost_opres_market.csv,inputs/plant_characteristics/cost_opres_market.csv,.csv,ignore,ignore,,,,,0,,,,,,done cost_vom.csv,,.csv,mean,ignore,r,"i,v,t",,0,0,,1,,,,done (ReEDS-to-PLEXOS output) cost_vom_mult.csv,inputs/waterclimate/cost_vom_mult.csv,.csv,ignore,ignore,,,,,0,,,,,,done -crf_co2_incentive.csv,,.csv,ignore,ignore,,,,,0,,,,,,done crf.csv,,.csv,ignore,ignore,,,,0,,,,,,,done +crf_co2_incentive.csv,,.csv,ignore,ignore,,,,,0,,,,,,done csapr_group1_ex.csv,inputs/csapr/csapr_group1_ex.csv,.csv,ignore,ignore,*st,,,,0,,,,,,done csapr_group2_ex.csv,inputs/csapr/csapr_group2_ex.csv,.csv,ignore,ignore,*st,,,,0,,,,,,done csapr_ozone_season.csv,inputs/csapr/csapr_ozone_season.csv,.csv,ignore,ignore,st,,,,0,,,,,,done +currency_incentives.csv,inputs/financials/currency_incentives.csv,.csv,ignore,ignore,,,,,0,,,,,,done d_osprey.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -d_szn.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +d_szn.csv,inputs/variability/d_szn_{osprey_num_years}.csv,.csv,ignore,ignore,,,,,0,,,,,,done dac_assumptions.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -dac_gas.csv,,.csv,ignore,ignore,,,,0,,,,,,,done +dac_gas.csv,inputs/consume/dac_gas_{GSw_DAC_Gas_Case}.csv,.csv,ignore,ignore,,,,1,0,,,,,,done deflator.csv,inputs/deflator.csv,.csv,ignore,ignore,,,,,0,,,,,,done degradation_adj.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -degradation_annual.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -depreciation_schedules.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +degradation_annual.csv,inputs/degradation/degradation_annual_{degrade_suffix}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +depreciation_schedules.csv,inputs/financials/depreciation_schedules_{depreciation_schedules_suffix}.csv,.csv,ignore,ignore,,,,1,0,,,,,,done df_capex_init.csv,postprocessing/retail_rate_module/calc_historical_capex/df_capex_init.csv,.csv,sum,geosize,region,"i,t",,0,0,,,,,,done disagg_geosize.csv,inputs/disaggregation/disagg_geosize.csv,.csv,ignore,ignore,,,,,0,,,,,,done disagg_hydroexist.csv,inputs/disaggregation/disagg_hydroexist.csv,.csv,ignore,ignore,,,,,0,,,,,,done @@ -88,38 +91,49 @@ disagg_population.csv,inputs/disaggregation/disagg_population.csv,.csv,ignore,ig disagg_translinesize.csv,inputs/disaggregation/disagg_translinesize.csv,.csv,ignore,ignore,,,,,0,,,,,,done distance_reinforcement.csv,,.csv,ignore,ignore,r,"*i,rscbin,miles",*i,0,0,,1,,,,done distance_spur.csv,,.csv,ignore,ignore,r,"*i,rscbin,miles",*i,0,0,,1,,,,done -distPVcap.csv,,.csv,sum,ignore,Unnamed: 0,wide,,1,0,,,,,,done -distPVCF_hourly.csv,,.csv,mean,ignore,Unnamed: 0,wide,,1,0,,,,,,TODO: change to capacity-weighted (using straight mean for now) -dr_dec.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done +distPVcap.csv,inputs/dGen_Model_Inputs/{distpvscen}/distPVcap_{distpvscen}.csv,.csv,sum,ignore,Unnamed: 
0,wide,,1,0,,,,,,done +distPVCF_hourly.csv,inputs/dGen_Model_Inputs/{distpvscen}/distPVCF_hourly_{distpvscen}.csv,.csv,mean,ignore,Unnamed: 0,wide,,1,0,,,,,,TODO: change to capacity-weighted (using straight mean for now) +dollaryear_fuel.csv,inputs/fuelprices/dollaryear.csv,.csv,ignore,ignore,,,,,0,,,,,,done +dr_dec.csv,inputs/demand_response/dr_decrease_profile_{drscen}.csv,.csv,mean,population,wide,"i,hour,year",,1,0,,,,,,done dr_decrease.csv,,.csv,mean,population,r,"**i,h",,0,0,,1,,,,done dr_hrs.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -dr_inc.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done +dr_inc.csv,inputs/demand_response/dr_increase_profile_{drscen}.csv,.csv,mean,population,wide,"i,hour,year",,1,0,,,,,,done dr_increase.csv,,.csv,mean,population,r,"**i,h",,0,0,,1,,,,done -dr_shed.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -dr_shifts.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -dr_types.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +dr_shed.csv,inputs/demand_response/dr_shed_{drscen}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +dr_shifts.csv,inputs/demand_response/dr_shifts_{drscen}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +dr_types.csv,inputs/demand_response/dr_types_{drscen}.csv,.csv,ignore,ignore,,,,,0,,,,,,done emit_rate.csv,,.csv,ignore,ignore,,"e,i,v,r",,0,0,,,,,,done (ReEDS-to-PLEXOS output) emit_scale.csv,inputs/carbonconstraints/emit_scale.csv,.csv,ignore,ignore,,,,,0,,,,,,done emitrate.csv,inputs/carbonconstraints/emitrate.csv,.csv,ignore,ignore,,,,,0,,,,,,done +eval_period_adj_mult.csv,,.csv,ignore,ignore,,,,,0,,,,,,done evmc_baseline_load.csv,,.csv,sum,population,*r,"h,t",,0,0,,1,,,,done -evmc_shape_profile_decrease.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done evmc_shape_generation.csv,,.csv,mean,population,r,"**i,h",,0,0,,1,,,,done -evmc_shape_profile_increase.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done evmc_shape_load.csv,,.csv,mean,population,r,"**i,h",,0,0,,1,,,,done +evmc_shape_decrease_profile.h5,inputs/demand_response/evmc_shape_decrease_profile_{evmcscen}.h5,.h5,ignore,ignore,wide,"i,hour,year",,1,0,,,,,,done +evmc_shape_increase_profile.h5,inputs/demand_response/evmc_shape_increase_profile_{evmcscen}.h5,.h5,ignore,ignore,wide,"i,hour,year",,1,0,,,,,,done +evmc_shape_profile_decrease.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done +evmc_shape_profile_increase.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done evmc_storage_charge.csv,,.csv,mean,population,r,"**i,h,t",,0,0,,1,,,,done +evmc_storage_decrease_profile.h5,inputs/demand_response/evmc_storage_decrease_profile_{evmcscen}.h5,.h5,ignore,ignore,wide,"i,hour,year",,1,0,,,,,,done evmc_storage_discharge.csv,,.csv,mean,population,r,"**i,h,t",,0,0,,1,,,,done -evmc_storage_profile_increase.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done -evmc_storage_profile_decrease.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done evmc_storage_energy.csv,,.csv,mean,population,r,"**i,h",,0,0,,1,,,,done -eval_period_adj_mult.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +evmc_storage_energy.h5,inputs/demand_response/evmc_storage_energy_{evmcscen}.h5,.h5,ignore,ignore,wide,"i,hour,year",,1,0,,,,,,done +evmc_storage_increase_profile.h5,inputs/demand_response/evmc_storage_increase_profile_{evmcscen}.h5,.h5,ignore,ignore,wide,"i,hour,year",,1,0,,,,,,done +evmc_storage_profile_decrease.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done +evmc_storage_profile_increase.csv,,.csv,mean,population,wide,"i,hour,year",,1,0,,1,,,,done 
+financials_hydrogen.csv,inputs/financials/financials_hydrogen.csv,.csv,ignore,ignore,,,,,0,,,,,,done +financials_sys.csv,inputs/financials/financials_sys_{financials_sys_suffix}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +financials_tech.csv,inputs/financials/financials_tech_{financials_tech_suffix}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +financials_transmission.csv,inputs/financials/financials_transmission_{financials_trans_suffix}.csv,.csv,ignore,ignore,,,,,0,,,,,,done financing_risk_mult.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +firm_transfer_limit.csv,,.csv,ignore,ignore,,,,,0,,1,,,,done firstyear.csv,inputs/capacitydata/firstyear.csv,.csv,ignore,ignore,,,,,0,,,,,,done flex_frac_all.csv,,.csv,mean,population,r,"*flextype,h,wide",,1,0,,1,,,,done forced_retirements.csv,inputs/state_policies/forced_retirements.csv,.csv,min,uniform,r,"*i,t",,0,0,,,,,,done forceperiods.csv,,.csv,ignore,ignore,,,,,,,,,,,done fprice.csv,,.csv,mean,ignore,r,"t,wide",,1,0,,1,,,,'ignore’ in disaggfunc because according to Wesley the existing data will work for the input data that is at census division level -frac_h_month_weights.csv,,.csv,ignore,ignore,,,,,,,,,,,done frac_h_ccseason_weights.csv,,.csv,ignore,ignore,,,,,,,,,,,done +frac_h_month_weights.csv,,.csv,ignore,ignore,,,,,,,,,,,done frac_h_quarter_weights.csv,,.csv,ignore,ignore,,,,,,,,,,,done fuel_price.csv,,.csv,ignore,ignore,,"i,r",,0,0,,,,,,done (ReEDS-to-PLEXOS output) futurefiles.csv,,.csv,ignore,ignore,,,,,0,,,,,,done @@ -138,10 +152,10 @@ growth_limit_absolute.csv,inputs/growth_constraints/growth_limit_absolute.csv,.c growth_penalty.csv,inputs/growth_constraints/growth_penalty.csv,.csv,ignore,ignore,,,,,0,,,,,,done gswitches.csv,,.csv,ignore,ignore,,,,,0,,,,,,done h_actualszn.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +h_ccseason_prm.csv,inputs/variability/h_ccseason_prm.csv,.csv,ignore,ignore,,,,,0,,,,,,done h_dt_szn.csv,inputs/variability/h_dt_szn.csv,.csv,ignore,ignore,,,,,0,,,,,,done h_szn.csv,inputs/variability/h_szn.csv,.csv,ignore,ignore,,,,0,0,,,,,,done h_szn_end.csv,inputs/variability/h_szn_end.csv,.csv,ignore,ignore,,,,,0,,,,,,done -h_ccseason_prm.csv,inputs/variability/h_ccseason_prm.csv,.csv,ignore,ignore,,,,,0,,,,,,done h_szn_start.csv,inputs/variability/h_szn_start.csv,.csv,ignore,ignore,,,,,0,,,,,,done h2_ba_share.csv,inputs/consume/h2_ba_share.csv,.csv,sum,geosize,*r,t,,0,0,,,,,,done h2_compressor_cap_cost_mult.csv,,.csv,ignore,ignore,,,,,,,1,,,,done @@ -157,8 +171,8 @@ heat_rate_mult.csv,inputs/waterclimate/heat_rate_mult.csv,.csv,ignore,ignore,,,, heat_rate_penalty_spin.csv,inputs/plant_characteristics/heat_rate_penalty_spin.csv,.csv,ignore,ignore,,,,,0,,,,,,done hierarchy.csv,,.csv,first,ignore,*r,"nercr,transreg,cendiv,st,interconnect,country,usda_region,aggreg,ccreg",,0,0,,,,,,done hintage_data.csv,,.csv,ignore,ignore,,,,,0,,,,,,done (handled separately in WriteHintage.py) -hmap_myr.csv,,.csv,ignore,ignore,,,,,0,,,,,,done hmap_7yr.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +hmap_myr.csv,,.csv,ignore,ignore,,,,,0,,,,,,done hour_szn_group.csv,,.csv,ignore,ignore,,,,,,,,,,,done hourly_operational_characteristics.csv,inputs/variability/hourly_operational_characteristics.csv,.csv,ignore,ignore,,,,,0,,,,,,done hourly_szn_end.csv,,.csv,ignore,ignore,,,,,,,,,,,done @@ -167,18 +181,20 @@ hours_hourly.csv,,.csv,ignore,ignore,,,,,,,,,,,done hset_hourly.csv,,.csv,ignore,ignore,,,,,,,,,,,done hyd_add_upg_cap.csv,inputs/supplycurvedata/hyd_add_upg_cap.csv,.csv,sum,hydroexist,r,"i,rscbin,wide",,1,0,,,,,,done 
hyd_fom.csv,inputs/hydrodata/hyd_fom.csv,.csv,mean,uniform,wide,i,,1,0,,,,,,Email from Wesley says to use uniform disaggfunc due to data coming from different places (can’t pull all required data from plant database like we wanted) -hydcf.csv,,.csv,mean,uniform,r,"*i,month,t",,0,0,,1,,,,done hydcapadj.csv,,.csv,mean,uniform,r,"*i,month",,0,0,,1,,,,done +hydcf.csv,,.csv,mean,uniform,r,"*i,month,t",,0,0,,1,,,,done hydro_mingen.csv,inputs/hydrodata/hydro_mingen.csv,.csv,mean,uniform,r,"*i,quarter",,0,0,,,,,,might be better to do something capacity-weighted hydrocapcostmult.csv,,.csv,ignore,ignore,,,,1,0,,,,,,done hydrofrac_policy.csv,inputs/state_policies/hydrofrac_policy.csv,.csv,ignore,ignore,st,"RPS_All,CES",,,0,,,,,,done +hydrogen_price.csv,inputs/fuelprices/h2-ct_{h2ctfuelscen}.csv,.csv,ignore,ignore,,,,0,0,,,,,,done i_coolingtech_watersource.csv,inputs/waterclimate/i_coolingtech_watersource.csv,.csv,ignore,ignore,,,,,0,,,,,,done i_coolingtech_watersource_link.csv,inputs/waterclimate/i_coolingtech_watersource_link.csv,.csv,ignore,ignore,,,,,0,,,,,,done i_coolingtech_watersource_upgrades.csv,inputs/upgrades/i_coolingtech_watersource_upgrades.csv,.csv,ignore,ignore,,,,,0,,,,,,done i_coolingtech_watersource_upgrades_link.csv,inputs/upgrades/i_coolingtech_watersource_upgrades_link.csv,.csv,ignore,ignore,,,,,0,,,,,,done ice_fom.csv,inputs/plant_characteristics/ice_fom.csv,.csv,ignore,ignore,,,,0,,,,,,,done -index_hr_map.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -inflation.csv,,.csv,ignore,ignore,,,,0,0,,,,,,done +incentives.csv,inputs/financials/incentives_{incentives_suffix}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +index_hr_map.csv,inputs/variability/index_hr_map_{osprey_num_years}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +inflation.csv,inputs/financials/inflation_{inflation_suffix}.csv,.csv,ignore,ignore,,,,0,0,,,,,,done itc_frac_monetized.csv,,.csv,ignore,ignore,,,,,0,,,,,,done itc_fractions.csv,,.csv,ignore,ignore,,"i,country,itc_tax_equity_penalty",,0,0,,,,,,done ivt.csv,,.csv,ignore,ignore,,,,,0,,,,,,done @@ -188,7 +204,7 @@ ivt_step.csv,inputs/userinput/ivt_step.csv,.csv,ignore,ignore,,,,,0,,,,,,done load_2010.csv,,.csv,sum,ignore,r,wide,,1,0,,1,,,,'ignore’ in disaggfunc because load will already be in correct spatial resolution load_allyear.csv,,.csv,sum,ignore,*r,"h,t",,0,0,,1,,,,'ignore’ in disaggfunc because load will already be in correct spatial resolution load_hourly.csv,,.csv,sum,population,r,*h,,0,0,,1,,,,done -load_multiplier.csv,,.csv,ignore,ignore,,cendiv,,1,0,,,,,,done +load_multiplier.csv,inputs/loaddata/demand_{demandscen}.csv,.csv,ignore,ignore,,cendiv,,1,0,,,,,,done load_multiplier_r.csv,,.csv,ignore,ignore,,,,1,0,,,,,,done maxage.csv,inputs/capacitydata/maxage.csv,.csv,ignore,ignore,,,,,0,,,,,,done methane_leakage_rate.csv,,.csv,ignore,ignore,,,,0,0,,,,,,done @@ -203,27 +219,27 @@ nat_gen_tech_frac.csv,inputs/national_generation/nat_gen_tech_frac.csv,.csv,igno national_gen_frac_allScen.csv,inputs/national_generation/national_gen_frac_allScen.csv,.csv,ignore,ignore,,,,1,0,,,,,,done national_rps_frac_allScen.csv,inputs/RPSdata/national_rps_frac_allScen.csv,.csv,ignore,ignore,,,,1,0,,,,,,done net_trade_can.csv,,.csv,sum,translinesize,*r,"h,t",,0,0,,1,,,,done -nexth_actualszn.csv,,.csv,ignore,ignore,,,,,0,,,,,,done nexth.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +nexth_actualszn.csv,,.csv,ignore,ignore,,,,,0,,,,,,done ng_crf_penalty.csv,,.csv,ignore,ignore,,,,0,0,,,,,,done ng_crf_penalty_st.csv,inputs/state_policies/ng_crf_penalty_st.csv,.csv,ignore,ignore,st,"*t,value (CRF_X / 
CRF_20)",,0,0,,,,,,done -ng_demand_elec.csv,,.csv,ignore,ignore,cendiv,,,1,0,,1,,,,done -ng_demand_tot.csv,,.csv,ignore,ignore,cendiv,,,1,0,,1,,,,done -ng_price_cendiv.csv,,.csv,ignore,ignore,cendiv,,,1,0,,1,,,,done +ng_demand_elec.csv,inputs/fuelprices/ng_demand_{ngscen}.csv,.csv,ignore,ignore,wide,year,,1,0,,,,,,done +ng_demand_tot.csv,inputs/fuelprices/ng_tot_demand_{ngscen}.csv,.csv,ignore,ignore,wide,year,,1,0,,,,,,done +ng_price_cendiv.csv,inputs/fuelprices/ng_{ngscen}.csv,.csv,ignore,ignore,wide,year,,1,0,,,,,,done nuclear_ba_ban_list.csv,inputs/state_policies/nuclear_ba_ban_list.csv,.csv,first,uniform,*r,,,0,0,,,,,,done nuclear_subsidies.csv,inputs/state_policies/nuclear_subsidies.csv,.csv,ignore,ignore,*st,year,,0,0,,,,,,done nuke_fom_adj.csv,inputs/capacitydata/nuke_fom_adj.csv,.csv,ignore,ignore,,,,0,,,,,,,done -numhours_nexth.csv,,.csv,ignore,ignore,,,,,0,,1,,,,done numhours.csv,,.csv,ignore,ignore,,,,,0,,1,,,,done -offshore_req.csv,,.csv,ignore,ignore,st,,,1,0,,,,,,done +numhours_nexth.csv,,.csv,ignore,ignore,,,,,0,,1,,,,done +offshore_req.csv,inputs/state_policies/offshore_req_{GSw_OfsWindForceScen}.csv,.csv,ignore,ignore,st,,,1,0,,,,,,done ofswind_rsc_mult.csv,,.csv,ignore,ignore,,,,1,0,,,,,,done oosfrac.csv,inputs/state_policies/oosfrac.csv,.csv,ignore,ignore,*st,,,,0,,,,,,done opres_periods.csv,inputs/reserves/opres_periods.csv,.csv,ignore,ignore,,,,,0,,,,,,done orperc.csv,inputs/orperc.csv,.csv,ignore,ignore,,,,,0,,,,,,done outage_forced.csv,inputs/plant_characteristics/outage_forced.csv,.csv,ignore,ignore,,,,,0,,,,,,done outage_planned.csv,inputs/plant_characteristics/outage_planned.csv,.csv,ignore,ignore,,,,,0,,,,,,done -peak_h.csv,,.csv,sum,ignore,r,"h,wide",,1,0,,1,,,,done peak_ccseason.csv,,.csv,sum,ignore,*r,"ccseason,t",,0,0,,1,,,,done (ok because it's load during peak NERC hour) +peak_h.csv,,.csv,sum,ignore,r,"h,wide",,1,0,,1,,,,done peakload.csv,,.csv,ignore,ignore,,,,,0,,1,,,,done period_szn.csv,,.csv,ignore,ignore,,,,,0,,,,,,done period_weights.csv,,.csv,ignore,ignore,,,,,0,,,,,,done @@ -233,7 +249,7 @@ plantcharout.csv,,.csv,ignore,ignore,,"0,2",,0,,,,,,,done poi_cap_init.csv,,.csv,sum,ignore,*r,,,0,0,,1,,,,'ignore’ in disaggfunc because data will be written in county resolution by writecapdat.py prescribed_nonRSC.csv,,.csv,sum,ignore,r,"t,i",,0,0,,1,,,,‘ignore’ in disaggfunc because data will be written in county resolution by writecapdat.py prescribed_rsc.csv,,.csv,sum,ignore,r,"t,i",,0,0,,1,,,,‘ignore’ in disaggfunc because data will be written in county resolution by writecapdat.py -prm_annual.csv,,.csv,ignore,ignore,*nercr,,,0,0,,,,,,done +prm_annual.csv,,.csv,ignore,ignore,*nercr,,,0,0,,1,,,,done psh_sc_duration.csv,,.csv,ignore,ignore,,,,,,,1,,,,done ptc_value_scaled.csv,,.csv,ignore,ignore,,"*i,v,t",,0,0,,,,,,done ptc_values.csv,,.csv,ignore,ignore,,"i,v,t,wide",,1,0,,,,,,only for retail @@ -244,13 +260,13 @@ pvb_ilr.csv,,.csv,ignore,ignore,,,,,0,,,,,,done pvbcapcostmult.csv,,.csv,ignore,ignore,,,,0,0,,,,,,done pvf_cap.csv,,.csv,ignore,ignore,,,,0,,,,,,,done pvf_onm_int.csv,,.csv,ignore,ignore,,,,0,,,,,,,done -r.csv,,.csv,first,ignore,0,,,0,,,1,,,,'ignore’ in disaggfunc because this file is dynamic to the user-defined spatial aggregation level +r.csv,,.csv,first,ignore,0,,,0,,,1,,,,ignore’` in disaggfunc because this file is dynamic to the user-defined spatial aggregation level r_ba.csv,,.csv,ignore,ignore,,,,,,,,,,,done r_cendiv.csv,,.csv,ignore,ignore,,,,,,,,,,,done r_county.csv,,.csv,ignore,ignore,,,,,,,,,,,done 
r_cs.csv,inputs/ctus/r_cs.csv,.csv,first,uniform,*r,cs,,0,0,,,,,,done r_cs_distance_mi.csv,inputs/ctus/r_cs_distance_mi.csv,.csv,mean,uniform,*r,cs,,0,0,,,,,,may need special handling given subset on both r and cs for now should work -r_rr_adj.csv,,.csv,first,ignore,"*r,rr",,,0,0,,,,,,'ignore’ in disaggfunc because the spatial aggregation level of this file is controlled by the agglevel switch +r_rr_adj.csv,inputs/transmission/r_rr_adj_{lvl}.csv,.csv,first,ignore,"*r,rr",,,0,0,,,,,,'ignore’ in disaggfunc because the spatial aggregation level of this file is controlled by the agglevel switch ramprate.csv,inputs/plant_characteristics/ramprate.csv,.csv,ignore,ignore,,,,,0,,,,,,done ramptime.csv,inputs/reserves/ramptime.csv,.csv,ignore,ignore,,,,,0,,,,,,done rb.csv,,.csv,first,ignore,0,,,0,,,1,,,,'ignore’ in disaggfunc because this file is specifically a collection of all valid BA regions @@ -258,7 +274,7 @@ rb_aggreg.csv,,.csv,ignore,ignore,,,,,,,,,,,done recstyle.csv,inputs/state_policies/recstyle.csv,.csv,ignore,ignore,*st,"RPSCat,style",,,0,,,,,,done rectable.csv,inputs/state_policies/rectable.csv,.csv,ignore,ignore,st_st,st,,,0,,,,,,done reeds_region_tz_map.csv,inputs/variability/reeds_region_tz_map.csv,.csv,trans_lookup,ignore,r,,,0,0,,,,,,'ignore’ in disaggfunc because input file is already at county resolution -reg_cap_cost_mult.csv,,.csv,mean,uniform,r,*i,,0,0,,1,,,,done +reg_cap_cost_mult.csv,inputs/financials/reg_cap_cost_mult_{reg_cap_cost_mult_suffix}.csv,.csv,mean,uniform,r,*i,,0,0,,,,,,done region_definitions.csv,inputs/userinput/region_definitions.csv,.csv,ignore,ignore,,,,,0,,,,,,done region_map.csv,inputs/valuestreams/region_map.csv,.csv,ignore,ignore,,,,,0,,,,,,only for valuestreams regions.csv,,.csv,ignore,ignore,,,,,0,,,,,,not done but only for retail @@ -277,8 +293,8 @@ rsc_wsc.csv,,.csv,ignore,ignore,,,,,0,,,,,,done runfiles.csv,runfiles.csv,.csv,ignore,ignore,ignore,,,0,0,,,,,,so meta safe_harbor_max.csv,,.csv,ignore,ignore,,,,,0,,,,,,done scalars.csv,inputs/scalars.csv,.csv,ignore,ignore,,,,,0,,,,,,done -set_allh.csv,,.csv,ignore,ignore,,,,,0,,,,,,done set_actualszn.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +set_allh.csv,,.csv,ignore,ignore,,,,,0,,,,,,done set_allszn.csv,,.csv,ignore,ignore,,,,,0,,,,,,done set_h.csv,inputs/variability/set_h.csv,.csv,ignore,ignore,,,,,,,,,,,done set_szn.csv,inputs/variability/set_szn.csv,.csv,ignore,ignore,,,,,,,,,,,done @@ -292,13 +308,14 @@ storage_duration.csv,inputs/storagedata/storage_duration.csv,.csv,ignore,ignore, storage_duration_pshdata.csv,inputs/storagedata/storage_duration_pshdata.csv,.csv,mean,uniform,r,"*i,v",,0,0,,,,,,probably better to do something capacity-weighted storage_mandates.csv,inputs/state_policies/storage_mandates.csv,.csv,ignore,ignore,*st,t,,0,0,,,,,,done storinmaxfrac.csv,inputs/storagedata/storinmaxfrac.csv,.csv,mean,uniform,r,"*i,v",,0,0,,,,,,done +stressperiods_seed.csv,,.csv,ignore,ignore,,,,,0,,,,,,done superpeak_hour_mapper.csv,inputs/variability/superpeak_hour_mapper.csv,.csv,ignore,ignore,,,,,0,,,,,,done supply_chain_adjust.csv,inputs/financials/supply_chain_adjust.csv,.csv,ignore,ignore,,,,,0,,,,,,done -stressperiods_seed.csv,,.csv,ignore,ignore,,,,,0,,,,,,done switches.csv,,.csv,ignore,ignore,,,,,0,,,,,,done tax_rate.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -tc_phaseout_schedule.csv,,.csv,ignore,ignore,,,,,0,,,,,,done +tc_phaseout_schedule.csv,inputs/financials/tc_phaseout_schedule_{GSw_TCPhaseout_schedule}.csv,.csv,ignore,ignore,,,,,0,,,,,,done 
tech_resourceclass.csv,inputs/techs/tech_resourceclass.csv,.csv,ignore,ignore,,,,,0,,,,,,done +techs.csv,inputs/techs/techs_{techs_suffix}.csv,.csv,ignore,ignore,,,,,0,,,,,,done techs_banned.csv,inputs/state_policies/techs_banned.csv,.csv,ignore,ignore,wide_st,i,,,0,,,,,,done techs_banned_ces.csv,inputs/state_policies/techs_banned_ces.csv,.csv,ignore,ignore,wide_st,i,,,0,,,,,,done techs_banned_imports_rps.csv,inputs/state_policies/techs_banned_imports_rps.csv,.csv,ignore,ignore,wide_st,i,,,0,,,,,,done @@ -323,16 +340,17 @@ transmission_line_fom.csv,,.csv,trans_lookup,ignore,"*r,rr",trtype,,0,0,drop_dup unapp_water_sea_distr.csv,inputs/waterclimate/unapp_water_sea_distr.csv,.csv,mean,geosize,r,"wst,wide",,1,0,,,,,,done unbundled_limit_ces.csv,inputs/state_policies/unbundled_limit_ces.csv,.csv,ignore,ignore,st,,,,0,,,,,,done unbundled_limit_rps.csv,inputs/state_policies/unbundled_limit_rps.csv,.csv,ignore,ignore,st,,,,0,,,,,,done +unitdata.csv,inputs/capacitydata/ReEDS_generator_database_final_{unitdata}.csv,.csv,ignore,ignore,,,,,0,,,,,,done +unitsize.csv,inputs/plant_characteristics/unitsize.csv,.csv,ignore,ignore,,,,,0,,,,,,done upgrade_costs_ccs_coal.csv,,.csv,ignore,ignore,,,,,0,,,,,,done upgrade_costs_ccs_gas.csv,,.csv,ignore,ignore,,,,,0,,,,,,done upgrade_link.csv,inputs/upgrades/upgrade_link.csv,.csv,ignore,ignore,,,,,0,,,,,,done -upgrade_mult_final.csv,inputs/upgrades/upgrade_mult.csv,.csv,ignore,ignore,,,,,0,,,,,,done upgrade_mult.csv,inputs/upgrades/upgrade_mult.csv,.csv,ignore,ignore,,,,,0,,,,,,done +upgrade_mult_final.csv,inputs/upgrades/upgrade_mult.csv,.csv,ignore,ignore,,,,,0,,,,,,done upgrade_mult_reduced.csv,inputs/upgrades/upgrade_mult_reduced.csv,.csv,ignore,ignore,,,,,0,,,,,,done upgradelink_water.csv,inputs/upgrades/upgradelink_water.csv,.csv,ignore,ignore,,,,,0,,,,,,done upv_exog_cap.csv,,.csv,sum,ignore,r,"*i,rscbin,t",,0,0,,1,,,,handled in writesupplycurves.py -unitdata.csv,,.csv,ignore,ignore,,,,,0,,,,,,done -unitsize.csv,inputs/plant_characteristics/unitsize.csv,.csv,ignore,ignore,,,,,0,,,,,,done +uranium_price.csv,inputs/fuelprices/uranium_{uraniumscen}.csv,.csv,ignore,ignore,,,,0,0,,,,,,done va_ng_crf_penalty.csv,,.csv,ignore,ignore,,,,0,0,,,,,,done val_aggreg.csv,,.csv,ignore,ignore,,,,0,none,,,,,,done val_ba.csv,,.csv,ignore,ignore,,,,,,,1,,,,done @@ -355,9 +373,9 @@ water_with_cons_rate.csv,inputs/waterclimate/water_with_cons_rate.csv,.csv,mean, wind_retirements.csv,,.csv,sum,ignore,r,"i,v,wide",,1,0,,1,,,,‘ignore’ in disaggfunc because data will be written in county resolution by writecapdat.py windcfmult.csv,,.csv,ignore,ignore,,,,1,0,,,,,,done windcfout.csv,,.csv,ignore,ignore,,,,1,0,,,,,,done -wind-ofs_prescribed_builds.csv,,.csv,sum,ignore,region,year,,0,0,,,,,,'ignore’ in disaggfunc because data will be read in at the correct spatial resolution +wind-ofs_prescribed_builds.csv,inputs/capacitydata/wind-ofs_prescribed_builds_{GSw_SitingWindOfs}_{lvl}.csv,.csv,sum,ignore,region,year,,0,0,,,,,,'ignore’ in disaggfunc because data will be read in at the correct spatial resolution wind-ons_exog_cap.csv,,.csv,ignore,ignore,r,"*i,t",,0,0,,1,,,,handled in writesupplycurves.py -wind-ons_prescribed_builds.csv,,.csv,sum,ignore,region,year,,0,0,,,,,,'ignore’ in disaggfunc because data will be read in at the correct spatial resolution +wind-ons_prescribed_builds.csv,inputs/capacitydata/wind-ons_prescribed_builds_{GSw_SitingWindOns}_{lvl}.csv,.csv,sum,ignore,region,year,,0,0,,,,,,'ignore’ in disaggfunc because data will be read in at the correct spatial resolution 
windows_2100.csv,,.csv,ignore,ignore,,,,,0,,,,,,done windows_default.csv,,.csv,ignore,ignore,,,,,0,,,,,,done windows_step10.csv,,.csv,ignore,ignore,,,,,0,,,,,,done @@ -369,7 +387,7 @@ plexos_inputs.gdx,,.gdx,ignore,ignore,,,,,0,,,,,,done can_trade_8760.h5,,.h5,sum,translinesize,r,"h,t",,0,0,,1,,,,done (but weird that it ends up with more values) load.h5,,.h5,sum,ignore,wide,"year,hour",,1,keepindex,,1,,,,Disaggregation handled in LDC_prep.py recf.h5,,.h5,recf,ignore,wide,index,index,1,keepindex,,1,,,,done -csp.h5,,.h5,recf,ignore,wide,index,index,1,keepindex,,1,,,,done +csp.h5,,.h5,csp,ignore,wide,index,index,1,keepindex,,1,,,,done retail_depreciation_sch.h5,,.h5,ignore,ignore,,"i,t",,0,0,,,,,,done retail_eval_period.h5,,.h5,ignore,ignore,,"i,t",,0,0,,,,,,done gswitches.txt,,.txt,ignore,ignore,,,,,0,,,,,,done @@ -408,4 +426,4 @@ raw_value_streams.py,raw_value_streams.py,,ignore,ignore,,,,,,,,,,,done tc_phaseout.py,tc_phaseout.py,,ignore,ignore,,,,,,,,,,,done utilities.py,utilities.py,,ignore,ignore,,,,,,,,,,,done valuestreams.gms,valuestreams.gms,,ignore,ignore,,,,,,,,,,,done -valuestreams.py,valuestreams.py,,ignore,ignore,,,,,,,,,,,done \ No newline at end of file +valuestreams.py,valuestreams.py,,ignore,ignore,,,,,,,,,,,done diff --git a/srun_template.sh b/srun_template.sh index d90e3c3..ab9f07a 100644 --- a/srun_template.sh +++ b/srun_template.sh @@ -5,5 +5,5 @@ #SBATCH --ntasks-per-node=1 #SBATCH --mail-user=[your email address] #SBATCH --mail-type=BEGIN,END,FAIL -#SBATCH --mem=250000 # RAM in MB; up to 256000 for normal or 2000000 for bigmem on kestrel +#SBATCH --mem=248000 # RAM in MB; up to 248000 for normal or 2000000 for bigmem on kestrel # add >>> #SBATCH --qos=high <<< above for quicker launch at double AU cost diff --git a/tests/conftest.py b/tests/conftest.py index c44c479..65554ba 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,15 +1,17 @@ import os import json import pytest +from pathlib import Path DATA_TEST_DIR = "data" + def pytest_addoption(parser): parser.addoption("--casepath", action="store", help="File path to ReEDS run.") # Read file map -@pytest.fixture() +@pytest.fixture(scope="module", autouse=True) def fmap(request): tests_dir = os.path.dirname(request.module.__file__) data_filename = os.path.join(tests_dir, DATA_TEST_DIR, "r2x_files.json") @@ -20,4 +22,40 @@ def fmap(request): # Get folder of ReEDS path @pytest.fixture() def casepath(pytestconfig): - return pytestconfig.getoption("casepath") + return Path(pytestconfig.getoption("casepath")) + + +@pytest.fixture +def r2x_files(casepath, fmap): + file_list = [] + for _, i_dict in fmap.items(): + if i_dict.get("input"): + fpath = casepath.joinpath("inputs_case") + else: + fpath = casepath.joinpath("outputs") + file_list.append(fpath.joinpath(i_dict.get("fname"))) + return file_list + + +@pytest.hookimpl +def pytest_generate_tests(metafunc): + tests_dir = os.path.dirname(metafunc.module.__file__) + data_filename = os.path.join(tests_dir, DATA_TEST_DIR, "r2x_files.json") + casepath = Path(metafunc.config.getoption("casepath")) + with open(data_filename, "r") as f: + fmap = json.load(f) + file_list = [] + for _, i_dict in fmap.items(): + if i_dict.get("input"): + fpath = casepath.joinpath("inputs_case") + else: + fpath = casepath.joinpath("outputs") + + if not i_dict.get("optional", True) and i_dict.get("column_mapping"): + file_list.append(fpath.joinpath(i_dict.get("fname"))) + if "validation_files" in metafunc.fixturenames: + metafunc.parametrize( + "validation_files", + file_list, + 
ids=list(map(lambda fpath: fpath.stem, file_list)), # To actually know what file + ) diff --git a/tests/data/r2x_files.json b/tests/data/r2x_files.json index f46bff3..3b199bf 100644 --- a/tests/data/r2x_files.json +++ b/tests/data/r2x_files.json @@ -1,430 +1,338 @@ { "ba_tz": { "fname": "reeds_region_tz_map.csv", - "mandatory": true + "input": true, + "optional": true }, - "online_capacity": { - "fname": "cap_ivrt.csv", + "bfuel_price": { "column_mapping": { - "value": "capacity_MW" + "r": "region", + "t": "year", + "value": "fuel_price" }, - "column_index": [ - "year", - "tech", - "tech_class", - "region" - ], - "units": "MW", + "fname": "repbioprice.csv", "input": false, - "mandatory": true, - "description": "Online capacity in MW" + "optional": false }, - "new_capacity": { - "fname": "cap_new_ivrt.csv", + "cf": { + "fname": "recf.h5", + "input": true, + "optional": false + }, + "cf_adjustment": { "column_mapping": { - "value": "reeds_capacity_MW" + "value": "cf_adj" }, - "column_index": [ - "year", - "tech", - "tech_class", - "region" - ], - "units": "MW", + "fname": "cf_adj_t.csv", "input": false, - "mandatory": true, - "description": "New online capacity prescribed by ReEDS." + "optional": false }, - "retirements": { - "fname": "ret_ivrt.csv", + "cost_vom": { "column_mapping": { - "value": "retired_MW" + "i": "tech", + "r": "region", + "t": "year", + "v": "tech_vintage", + "value": "cost_vom" }, - "column_index": [ - "year", - "tech", - "tech_class", - "region" - ], - "units": "MW", - "description": "Planned and decision retirements" + "fname": "cost_vom.csv", + "input": false, + "optional": false }, - "hierarchy": { - "fname": "hierarchy.csv", + "emission_rates": { "column_mapping": { - "nercr": "nerc_region", - "transreg": "transmission_region" + "eall": "emission_type", + "i": "tech", + "r": "region", + "t": "year", + "v": "tech_vintage", + "value": "rate" }, - "column_index": [ - "region", - "cendiv", - "st" - ], - "input": true, - "mandatory": true + "fname": "emit_rate.csv", + "input": false, + "optional": false }, - "tx_out": { - "fname": "tran_out.csv", + "forced_outages": { "column_mapping": { - "region": "region_from", - "rr": "region_to", - "value": "flow_MW" + "i": "tech", + "value": "forced_outage_rate" }, - "column_index": [ - "year", - "region_from", - "region_to", - "trtype" - ], - "units": "MW" - }, - "tran_cap_energy": { - "fname": "tran_cap_energy.csv", + "fname": "forced_outage.csv", + "input": false, + "optional": false + }, + "fuel_price": { "column_mapping": { - "region": "region_from", - "rr": "region_to", - "value": "max_active_power" + "i": "tech", + "r": "region", + "t": "year", + "value": "fuel_price" }, - "column_index": [ - "year", - "region_from", - "region_to", - "trtype" - ], - "mandatory": true - }, - "tranloss": { - "fname": "tranloss.csv", + "fname": "fuel_price.csv", + "input": false, + "optional": false + }, + "fuels": { "column_mapping": { - "region": "region_from", - "rr": "region_to", - "loss": "transmission_loss" + "f": "fuel", + "i": "tech" }, - "column_index": [ - "region_from", - "region_to", - "trtype" - ], - "mandatory": true, - "input": true + "fname": "fuel2tech.csv", + "input": false, + "optional": false }, "generation": { - "fname": "gen_h.csv", "column_mapping": { "value": "dispatch_MW" }, - "column_index": [ - "year", - "tech", - "region", - "h" - ], - "units": "MW", - "dtype": {} - }, - "technologies": { - "fname": "valcap_i.csv", - "units": "MW", - "input": true, - "mandatory": true + "fname": "gen_h.csv" }, - "technology_class": 
{ + "h2_fuel_price": { "column_mapping": { - "uni": "tech_class" + "$2004/kg": "h2_price" }, - "fname": "v.csv", - "optional": true, - "input": false - }, - "years": { - "fname": "modeledyears.csv", - "column_mapping": {}, - "input": true, - "mandatory": true + "fname": "h2_price_month.csv", + "input": false, + "optional": true }, "heat_rate": { - "fname": "heat_rate.csv", "column_mapping": { + "i": "tech", + "r": "region", + "t": "year", + "v": "tech_vintage", "value": "heat_rate" }, - "column_index": [ - "year", - "tech", - "tech_class", - "region" - ], - "units": "MMBTu/MWh", - "input": true, - "mandatory": true + "fname": "heat_rate.csv", + "input": false, + "optional": false }, - "bio_fuel_price": { - "fname": "repbioprice.csv", + "hierarchy": { "column_mapping": { - "value": "bfuel_price" + "nercr": "nerc_region", + "*r": "region", + "transreg": "transmission_region" }, - "column_index": [ - "region", - "year" - ], - "mandatory": true, - "input": false + "fname": "hierarchy.csv", + "input": true, + "optional": false }, - "ng_fuel_price": { - "fname": "repgasprice_r.csv", + "hour_map": { "column_mapping": { - "value": "ng_fuel_price" + "*timestamp": "time_index" }, - "column_index": [ - "region", - "year" - ], - "mandatory": true, - "input": false + "fname": "hmap_myr.csv", + "input": true, + "optional": false }, - "h2_fuel_price": { - "fname": "h2_price_month.csv", - "column_mapping": { - "$2004/kg": "h2_price" - }, - "column_index": [ - "region", - "year", - "month" - ], - "units": "$/kg" + "hour_map_h17": { + "fname": "h_dt_szn.csv", + "input": true, + "optional": true }, - "fuel_price": { - "fname": "fuel_price.csv", + "hours": { "column_mapping": { - "value": "fuel_price" + "value": "hour_weight" }, - "column_index": [ - "year", - "tech", - "region" - ], - "units": "$/MMBtu", - "mandatory": true, - "input": true + "fname": "hours.csv", + "input": false, + "optional": false }, - "fuels": { - "fname": "fuel2tech.csv", + "hydro_cap_adj": { "column_mapping": { - "fuel": "fuels" + "*i": "tech", + "r": "region", + "month": "month", + "value": "hydro_cf_adj" }, - "mandatory": true, - "input": true + "fname": "hydcapadj.csv", + "input": true, + "optional": false }, - "pollutants": { - "fname": "e.csv", + "hydro_cf": { "column_mapping": { - "eall": "pollutant" + "*i": "tech", + "r": "region", + "month": "month", + "t": "year", + "value": "hydro_cf" }, - "mandatory": true, - "input": true + "fname": "hydcf.csv", + "input": true, + "optional": false }, - "emission_rates": { - "fname": "emit_rate.csv", - "mandatory": true, - "input": false, + "hydro_min_gen": { "column_mapping": { - "value": "production_rate" + "*i": "tech", + "quarter": "season", + "value": "hydro_minload" }, - "column_index": [ - "pollutant", - "tech", - "tech_class", - "region", - "year" - ] + "fname": "hydro_mingen.csv", + "input": true, + "optional": true }, - "cost_vom": { - "fname": "cost_vom.csv", + "ilr": { "column_mapping": { - "value": "cost_vom" + "value": "ilr" }, - "column_index": [ - "year", - "tech", - "tech_class", - "region" - ], - "mandatory": true, + "fname": "ilr.csv", "input": false, - "description": "Variable O&M in $/MWh", - "units": "$/MWh" + "optional": false }, - "forced_outages": { - "fname": "outage_forced.csv", - "mandatory": true, - "columns": [ - "tech", - "forced_outage_rate" - ], - "column_index": [ - "tech" - ], + "load": { + "fname": "load.h5", "input": true, - "dtype": { - "forced_outage_rate": "float32" - } + "optional": false }, - "planned_outages": { - "fname": "outage_planned.csv", - 
"columns": [ - "tech", - "planned_outages" - ], - "column_index": [ - "tech" - ], - "mandatory": true, - "input": true, - "dtype": { - "planned_outages": "float32" - } + "new_capacity": { + "column_mapping": { + "value": "reeds_capacity_MW" + }, + "fname": "cap_new_ivrt.csv", + "input": false, + "optional": false }, - "storage_durations": { - "fname": "storage_duration.csv", - "columns": [ - "tech", - "duration" - ], - "column_index": [ - "tech" - ], - "mandatory": true, - "input": true, - "dtype": { - "duration": "int32" - } + "ng_fuel_price": { + "column_mapping": { + "value": "ng_fuel_price" + }, + "fname": "repgasprice_r.csv", + "input": false, + "optional": false }, - "storage_eff": { - "fname": "storage_eff.csv", + "online_capacity": { "column_mapping": { - "value": "storage_eff" + "i": "tech", + "r": "region", + "t": "year", + "v": "tech_vintage", + "value": "rated_capacity" }, - "column_index": [ - "tech", - "year" - ], - "mandatory": true, - "input": false + "fname": "cap_ivrt.csv", + "input": false, + "optional": false }, "opres_supply": { - "fname": "opRes_supply.csv", "column_mapping": { "value": "reserve_capacity_MW" }, - "column_index": [ - "year", - "tech", - "region" - ], - "mandatory": false, - "input": false + "fname": "opRes_supply_h.csv", + "input": false, + "optional": true }, - "hours": { - "fname": "hours.csv", - "input": true, + "planned_outages": { "column_mapping": { - "value": "hour_weight" + "i": "tech", + "value": "planned_outage_rate" }, - "dtype": { - "h": "category" - } + "fname": "planned_outage.csv", + "input": false, + "optional": false }, - "hour_map": { - "fname": "hmap_myr.csv", - "input": true, + "pollutants": { "column_mapping": { - "*timestamp": "time_index" + "eall": "pollutant" }, - "dtype": { - "year": "str", - "season": "category", - "h": "category", - "hour": "int32" - } + "fname": "e.csv", + "input": false, + "optional": false }, - "hydro_cf": { - "fname": "hydcf.csv", + "retirements": { "column_mapping": { - "*i": "tech", - "month": "month", - "value": "hydro_cf" + "value": "retired_MW" }, - "column_index": [ - "year", - "tech", - "region", - "month" - ], - "mandatory": false, - "input": true + "fname": "ret_ivrt.csv" }, - "hydro_min_gen": { - "fname": "hydro_mingen.csv", + "rs_map": { + "fname": "rsmap.csv", + "input": true, + "optional": true + }, + "storage_duration": { "column_mapping": { - "*i": "tech", - "quarter": "season", - "value": "hydro_minload" + "i": "tech", + "value": "duration" }, - "column_index": [ - "tech", - "region", - "season" - ], - "mandatory": true, - "input": true + "fname": "storage_duration.csv", + "input": false, + "optional": false }, - "hydro_cap_adj": { - "fname": "hydcapadj.csv", + "storage_eff": { "column_mapping": { - "*i": "tech", - "month": "month", - "value": "hydro_cf_adj" + "i": "tech", + "t": "year", + "value": "charge_efficiency" }, - "column_index": [ - "tech", - "region", - "month" - ], - "mandatory": true, + "fname": "storage_eff.csv", + "input": false, + "optional": false + }, + "switches": { + "fname": "switches.csv", "input": true, - "note": "Monthly capacity adjustment." 
+ "optional": false }, - "ilr": { - "fname": "ilr.csv", + "technologies": { + "fname": "valcap_i.csv", + "input": false, + "optional": false + }, + "technology_class": { "column_mapping": { - "value": "ilr" + "*": "tech_class" + }, + "fname": "v.csv", + "input": false, + "optional": false + }, + "tx_cap": { + "column_mapping": { + "r": "from_bus", + "rr": "to_bus", + "t": "year", + "trtype": "kind", + "value": "max_active_power" }, - "column_index": [ - "tech" - ] + "fname": "tran_cap_energy.csv", + "optional": false }, - "electrolyzer_load": { - "fname": "prod_load.csv", + "tx_losses": { "column_mapping": { - "value": "electrolizer_load_MW" + "r": "from_bus", + "rr": "to_bus", + "trtype": "kind", + "value": "losses" }, - "column_index": [ - "tech", - "region", - "h", - "year" - ], - "units": "MW" + "fname": "tranloss.csv", + "input": false, + "optional": false }, - "switches": { - "fname": "switches.csv", - "columns": [ - "switch", - "value" - ], - "column_index": [ - "switch" - ], - "mandatory": true + "tx_out": { + "column_mapping": { + "r": "region", + "rr": "region_to", + "t": "year", + "trtype": "kind", + "value": "flow_MW" + }, + "fname": "tran_out.csv", + "optional": true + }, + "upv_sc_out": { + "fname": "df_sc_out_upv_reduced.csv", + "optional": true + }, + "wind-ofs_sc_out": { + "fname": "df_sc_out_wind-ofs_reduced.csv", + "optional": true + }, + "wind-ons_sc_out": { + "fname": "df_sc_out_wind-ons_reduced.csv", + "optional": true + }, + "years": { + "fname": "modeledyears.csv", + "input": true, + "optional": true } } diff --git a/tests/test_r2x_integration.py b/tests/test_r2x_integration.py index bd244c2..28055dc 100644 --- a/tests/test_r2x_integration.py +++ b/tests/test_r2x_integration.py @@ -1,28 +1,37 @@ -import os from utils import get_missing_files, get_missing_columns -def test_r2x_integration(casepath, fmap): +def test_r2x_files_exists(fmap, r2x_files): + """Test that the files used in downstream models are created""" file_list = [ - value["fname"] for _, value in fmap.items() if value.get("mandatory", False) + value["fname"] + for _, value in fmap.items() + if not value.get("optional") # Only files that are not optional ] + + fnames = list(map(lambda fpath: fpath.name, r2x_files)) + filest_to_check = list(map(lambda fpath: r2x_files[fnames.index(fpath)], file_list)) + missing_files = get_missing_files(filest_to_check) + assert ( + len(missing_files) == 0 + ), f"The following files are missing: {list(map(lambda fpath: fpath.name, missing_files))}" + + + +def test_r2x_files_column_check(fmap, validation_files): + """Test that files needed have the appropiate columns""" file_column_dict = { - value["fname"]: value.get("column_mapping").keys() + value["fname"]: value["column_mapping"].keys() for _, value in fmap.items() - if value.get("column_mapping") # Only process files with column_mapping - if value.get("mandatory", False) + if not value.get("optional", False) # Only files that are not optional + if value.get("column_mapping", False) # Only process files with column_mapping } - # Verify files first. 
- missing_files = get_missing_files(casepath, file_list) - assert len(missing_files) == 0, f"The following files are missing: {missing_files}" + # fnames = list(map(lambda fpath: fpath.name, files)) + fname = validation_files.name + fpath = validation_files missing_columns = [] - for fname, expected_columns in file_column_dict.items(): - for path_prefix in ["outputs", "inputs_case", "inputs_params"]: - fpath = os.path.join(casepath, path_prefix, fname) - if os.path.isfile(fpath): - missing_columns = get_missing_columns(fpath, expected_columns) - assert ( - len(missing_columns) == 0 - ), f"Missing columns in {fpath}: {missing_columns}" + expected_columns = file_column_dict[fname] + missing_columns = get_missing_columns(fpath, expected_columns) + assert len(missing_columns) == 0, f"Missing columns in {fpath}: {missing_columns}" diff --git a/tests/utils.py b/tests/utils.py index 91d56ad..2892b63 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,20 +8,6 @@ # Third-party packages import pandas as pd -DEFAULT_SET_MAP = { - "i": "tech", - "v": "tech_class", - "r": "region", - "*r": "region", - "*szn": "season", - "t": "year", - "*t": "year", - "allh": "h", - "allt": "year", - "f": "fuel", - "e": "pollutant", -} - def get_missing_columns(fpath: str, column_names: list) -> list: """List of missing columns from a csv file. @@ -39,7 +25,6 @@ def get_missing_columns(fpath: str, column_names: list) -> list: df = ( pd.read_csv(fpath, nrows=0) .rename(columns=str.lower) - .rename(columns=DEFAULT_SET_MAP) ) except pd.errors.EmptyDataError: raise ValueError(f"Required file for R2X: {fpath} is empty!") @@ -48,44 +33,17 @@ def get_missing_columns(fpath: str, column_names: list) -> list: def get_missing_files( - project_folder: str, file_list: Iterable, max_depth: int = 2 + file_list: Iterable[os.PathLike] ) -> list: - """List missing required files from project folder. - - This function looks recursively in the project folder. For safety we only - look 2 levels of folders + """Get missing files from the ReEDS inputs_case and outputs folders. Args: - project_folder: Folder to look for the files - file_list: Iterable of files to check - max_depth: Level of subfolders to look. + file_list: Iterable of paths to check. Returns: - A list with the missing files or empty list + A list with the missing files or an empty list """ - all_files = set() - - # Initialize stack with the project folder and depth 0 - input_folder = os.path.join(project_folder, "inputs_case") - output_folder = os.path.join(project_folder, "outputs") - stack: list[tuple[str, int]] = [(input_folder, 0), (output_folder, 0)] - - while stack: - current_folder, current_depth = stack.pop() - - if current_depth > max_depth: - continue - - for root, dirs, dir_files in os.walk(current_folder): - for file_name in dir_files: - file_path = os.path.join(root, file_name) - all_files.add(os.path.basename(file_path)) - - for folder in dirs: - next_folder = os.path.join(root, folder) - stack.append((next_folder, current_depth + 1)) - missing_files = [f for f in file_list if os.path.basename(f) not in all_files] - return missing_files + return [f for f in file_list if not os.path.exists(f)] #%% Imports
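The reworked tests above replace the recursive directory walk with direct path checks: file discovery now lives in the r2x_files fixture and pytest_generate_tests, and get_missing_files simply filters on os.path.exists. A minimal sketch of how the simplified helper behaves, assuming a hypothetical completed run at runs/v20240101_example (the run path and the two file names are illustrative placeholders, not part of the patch; the test modules themselves import it as "from utils import get_missing_files"):

    from pathlib import Path
    from tests.utils import get_missing_files

    # Mirror the r2x_files fixture: entries marked "input": true in r2x_files.json
    # resolve under inputs_case/, everything else under outputs/.
    casepath = Path("runs/v20240101_example")  # hypothetical --casepath value
    candidates = [
        casepath / "inputs_case" / "hierarchy.csv",  # an "input": true entry
        casepath / "outputs" / "cap_ivrt.csv",       # an "input": false entry
    ]

    # get_missing_files just checks each path, so an empty list means the run
    # produced everything that the downstream R2X translation needs.
    missing = get_missing_files(candidates)
    print([p.name for p in missing])

The same checks run under pytest with, for example: pytest tests/test_r2x_integration.py --casepath runs/v20240101_example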