Adding changes based on C Vernon's review
kanishkan91 committed Jan 8, 2023
1 parent ee9585f commit 0ef1c0c
Showing 5 changed files with 28 additions and 135 deletions.
14 changes: 0 additions & 14 deletions demeter/ncdf_conversion.py
@@ -259,17 +259,3 @@ def process_output(self,
return ds


if __name__ == "__main__":

input_file_directory = "/Users/d3y010/Downloads"
output_file_directory = "/Users/d3y010/Desktop"
target_year = 2005

# instantiate class
x = DemeterToNetcdf(scenario_name="rcp85cooler",
project_name="im3")

# convert demeter output to a NetCDF file
x.process_output(input_file_directory=input_file_directory,
output_file_directory=output_file_directory,
target_year=target_year)
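
For reference, the removed __main__ block above can be reproduced in a user script instead. A minimal sketch based on the deleted example, assuming DemeterToNetcdf is importable from demeter.ncdf_conversion; the directory paths are placeholders:

from demeter.ncdf_conversion import DemeterToNetcdf

# placeholder directories; point these at your own Demeter output and target folders
input_file_directory = "/path/to/demeter/outputs"
output_file_directory = "/path/to/netcdf/outputs"
target_year = 2005

# instantiate the converter for a scenario and project
converter = DemeterToNetcdf(scenario_name="rcp85cooler",
                            project_name="im3")

# convert the Demeter output for the target year to a NetCDF file
converter.process_output(input_file_directory=input_file_directory,
                         output_file_directory=output_file_directory,
                         target_year=target_year)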
10 changes: 2 additions & 8 deletions demeter/process.py
@@ -58,8 +58,7 @@ def prep_step(self):
self.s.lat, self.s.lon, self.step, self.s.kernel_vector,
self.s.weights, self.s.spat_ludataharm)

# create transition array to store data
#self.transitions = np.zeros(shape=(self.l_spat_region, self.l_order_rules, self.l_order_rules))


def intense_pass(self, pass_num):
"""Conduct the first pass of intensification."""
@@ -128,12 +127,7 @@ def outputs(self):
# convert land cover from sqkm per grid cell per land class to fraction (n_grids, n_landclasses)
fraction_lu = self.s.spat_ludataharm / np.tile(self.s.cellarea * self.s.celltrunk, (self.l_fcs, 1)).T

# optionally save land cover transitions as a CSV
#if (self.config.save_transitions == 1) and (self.step in self.config.target_years_output):

# self.config.logger.info("Saving land cover transition files for time step {0}...".format(self.step))

# wdr.write_transitions(self.s, self.config, self.step, self.transitions)

# create a NetCDF file of land cover fraction for each year by grid cell containing each land class
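
The fraction conversion near the top of this hunk divides each cell's per-class land area by the total cell area tiled across land classes. A standalone numpy sketch of that broadcasting pattern, with illustrative shapes and values rather than Demeter data:

import numpy as np

n_grids, n_landclasses = 4, 3

# per-cell land area by class in sq km (rows: grid cells, columns: land classes)
spat_ludataharm = np.random.rand(n_grids, n_landclasses) * 10.0

# total area of each grid cell in sq km and the fraction of the cell considered
cellarea = np.full(n_grids, 100.0)
celltrunk = np.ones(n_grids)

# tile the per-cell area across classes and transpose to (n_grids, n_landclasses)
fraction_lu = spat_ludataharm / np.tile(cellarea * celltrunk, (n_landclasses, 1)).T

print(fraction_lu.shape)  # (4, 3): per-class area fraction for each grid cell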

@@ -151,7 +145,7 @@
write_ncdf =True
return wdr.lc_timestep_csv(self.config, self.step, self.s.final_landclasses, self.s.spat_coords, orig_spat_aez,
self.s.spat_region, self.s.spat_water, self.s.cellarea, self.s.spat_ludataharm,
self.config.metric, self.config.tabular_units, self.write_outputs,write_ncdf,self.sce,self.res,write_csv,self.regrid_res)
self.config.metric, self.config.tabular_units, self.write_outputs, write_ncdf, self.sce, self.res, write_csv, self.regrid_res)

def process(self):
"""
32 changes: 21 additions & 11 deletions demeter/weight/kernel_density.py
@@ -18,19 +18,30 @@


def handle_single_pft(pft_order, order_rules, final_landclasses, pft_maps, cellindexresin,
spat_ludataharm, kernel_maps, kernel_vector,weights):
"""
Helper function to handle pft convultion filters. This is used to parallelize the convultion filter operation to speed up processing.
spat_ludataharm, kernel_maps, kernel_vector, weights):

"""Helper function to handle pft convolution filters. This is passed to apply_convolution and is used to parallelize the convolution filter operation to speed up processing.
"""
Parameters:
pft_order (List[int]): The processing order of PFTs
order_rules (List[str]): The order rules defined by the user
final_landclasses (List[int]): A list of land classes
pft_maps (dict): A map of PFTs
cellindexresin (Union[None, int]): Input resolution
spat_ludataharm (pd.DataFrame): DataFrame of land use data
Returns:
list: processed convolution filters for each LT.
"""
pft = np.where(order_rules == pft_order)[0][0]
# get final land class name
flc = final_landclasses[pft]
# print(pft)

# populate pft_maps array with base land use layer data
pft_maps[np.int_(cellindexresin[0, :]), np.int_(cellindexresin[1, :]), pft] = spat_ludataharm[:, pft]
# print(pft_maps.shape)

# apply image filter

kernel_maps[:, :, pft] = ndimage.filters.convolve(pft_maps[:, :, pft], weights, output=None, mode='wrap')
Expand All @@ -40,8 +51,8 @@ def handle_single_pft(pft_order, order_rules, final_landclasses, pft_maps, celli

min_seed = 0.0000000001
kernel_maps[:, :, pft][
kernel_maps[:, :, pft] == 0] = min_seed # np.nanmin(kernel_maps[:, :, pft], [kernel_maps[:, :, pft] > 0])
# print(kernel_maps.shape)
kernel_maps[:, :, pft] == 0] = min_seed # np.nanmin(kernel_maps[:, :, pft], [kernel_maps[:, :, pft] > 0])

# reshaping to the spatial grid-cell data (vector)
kernel_vector[:, pft] = kernel_maps[np.int_(cellindexresin[0, :]), np.int_(cellindexresin[1, :]), pft]
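
The body above maps the vectorized land-use data onto a 2-D grid, applies a convolution with wrap-around boundaries, seeds exact zeros with a tiny value so later weighting never divides by zero, and flattens back to the grid-cell vector. A self-contained sketch of the filter-and-seed step using scipy's current ndimage.convolve (the code above calls the older ndimage.filters alias); grid size and kernel are illustrative:

import numpy as np
from scipy import ndimage

nlat, nlon = 180, 360
rng = np.random.default_rng(0)

# illustrative land-use fraction map for one PFT and a 3x3 smoothing kernel
pft_map = rng.random((nlat, nlon))
weights = np.ones((3, 3)) / 9.0

# convolve with wrap-around boundaries, as in handle_single_pft
kernel_map = ndimage.convolve(pft_map, weights, mode='wrap')

# replace exact zeros with a tiny seed so downstream normalization never divides by zero
min_seed = 1e-10
kernel_map[kernel_map == 0] = min_seed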

@@ -184,13 +195,12 @@ def apply_convolution(self, cellindexresin, pft_maps, kernel_maps, lat, lon, yr,
order_rules= self.order_rules
final_landclasses= self.final_landclasses



pool.starmap(handle_single_pft, zip(aux_val,repeat(order_rules),repeat(final_landclasses),
pool.starmap(handle_single_pft, zip(aux_val,repeat(order_rules), repeat(final_landclasses),
repeat(pft_maps),repeat(cellindexresin),
repeat(spat_ludataharm),
repeat(kernel_maps), repeat(kernel_vector),repeat(weights)))
pool.terminate()

return kernel_vector
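
The call above fans one task per PFT out to a process pool, varying only the PFT index while passing the shared arrays through itertools.repeat. A stripped-down sketch of that starmap pattern; the worker and its arguments are stand-ins, not the Demeter objects:

from itertools import repeat
from multiprocessing import Pool

def process_one_pft(pft_index, shared_weights):
    # stand-in for handle_single_pft: per-PFT work against shared inputs
    return pft_index * shared_weights

if __name__ == "__main__":
    pft_indices = range(8)      # one task per PFT, like aux_val above
    shared_weights = 2.0        # stand-in for the repeat()-ed shared arrays

    with Pool(processes=4) as pool:
        # zip pairs the varying index with the repeated shared arguments
        results = pool.starmap(process_one_pft, zip(pft_indices, repeat(shared_weights)))

    print(results)  # [0.0, 2.0, 4.0, ...]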


97 changes: 0 additions & 97 deletions processing_scripts/convert_csv_col_to_ncdf_subdata.py

This file was deleted.

10 changes: 5 additions & 5 deletions setup.py
@@ -19,11 +19,11 @@ def readme():
description='A land use land cover change disaggregation model',
long_description=readme(),
long_description_content_type="text/markdown",
install_requires=['configobj~=5.0.6',
'numpy~=1.20.3',
'pandas~=1.2.4',
'scipy~=1.6.3',
'requests~=2.20.0',
install_requires=['configobj>=5.0.6',
'numpy >=1.20.3',
'pandas >=1.2.4',
'scipy >=1.6.3',
'requests>=2.20.0',
'gcamreader>=1.2.5',
'xarray >= 0.20.2'],
include_package_data=True
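
For context on the specifier change above: ~=X.Y.Z is a compatible-release pin (equivalent to >=X.Y.Z, ==X.Y.*), while >=X.Y.Z accepts any newer release. A small illustration using the packaging library; the version numbers are examples only:

from packaging.specifiers import SpecifierSet

compatible = SpecifierSet("~=1.20.3")   # >=1.20.3 and ==1.20.*
minimum = SpecifierSet(">=1.20.3")      # 1.20.3 or anything newer

print("1.26.0" in compatible)  # False: outside the 1.20.* series
print("1.26.0" in minimum)     # True: at or above 1.20.3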
