Merge pull request #35 from JGCRI/demeter_1.3.1
Demeter 1.3.1
kanishkan91 committed Aug 22, 2023
2 parents 9aeac52 + c244779 commit 70f2b7f
Showing 23 changed files with 17,838 additions and 405 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -144,3 +144,4 @@ dmypy.json

# exclude dirs
demeter.egg-info/
.Rproj.user
2 changes: 2 additions & 0 deletions R_interface/.Rbuildignore
@@ -0,0 +1,2 @@
^.*\.Rproj$
^\.Rproj\.user$
12 changes: 12 additions & 0 deletions R_interface/DESCRIPTION
@@ -0,0 +1,12 @@
Package: Rdemeter
Type: Package
Title: R version of demeter
Version: 0.1.0
Authors@R: c(person(given='Marcus', family='Perry', email='marcus.perry@pnnl.gov', role=c('aut','cre')),
person(given='Kanishka B.', family='Narayan', email='kanishka.narayan@pnnl.gov', role=c('aut')))
Maintainer: Marcus Perry <marcus.perry@pnnl.gov>
Description: Provides a workflow that allows the user to run demeter directly from R.
License: BSD2
Encoding: UTF-8
LazyData: true
RoxygenNote: 7.1.1
1 change: 1 addition & 0 deletions R_interface/NAMESPACE
@@ -0,0 +1 @@
exportPattern("^[[:alpha:]]+")
25 changes: 25 additions & 0 deletions R_interface/R/demeter.R
@@ -0,0 +1,25 @@


# Run demeter from R using a demeter configuration (.ini) file.
run_demeter_R <- function(path_to_python = "../venv/Scripts/python.exe",
                          path_to_inputs = "C:/Projects/demeter_sample/config_gcam_reference.ini",
                          write_outputs = TRUE){

  # point reticulate at the Python environment where demeter is installed
  Sys.setenv(RETICULATE_PYTHON = path_to_python)
  print("Setup complete for env")

  demeter_R <- reticulate::import("demeter")
  print("Package imported")

  # pass the user-supplied write_outputs flag through to demeter
  demeter_R$run_model(config_file = path_to_inputs, write_outputs = write_outputs)
  print("Demeter run successfully completed in R!")

}


# Import a Python model (demeter by default) into R and return the module object.
port_model_to_R <- function(path_to_python = "../venv/Scripts/python.exe",
                            model_name = "demeter"){

  # point reticulate at the Python environment where the model is installed
  Sys.setenv(RETICULATE_PYTHON = path_to_python)
  print("Setup complete for env")

  model <- reticulate::import(model_name)

  return(model)
}
32 changes: 32 additions & 0 deletions R_interface/README.md
@@ -0,0 +1,32 @@
# Rdemeter

This is an R interface for demeter.

To use the R functionality, open the `Rdemeter.Rproj` project file and run the code below.

The user will first have to ensure that the Python version of demeter is installed.
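
For example, a minimal setup might look like the sketch below; the demeter installation step and paths are assumptions that depend on the user's environment.

```R

# a minimal setup sketch: demeter itself is assumed to be installed in the
# target Python environment (for example, `pip install .` from a clone of
# the demeter repository); adjust to your own setup
install.packages("reticulate")  # needed to call Python from R
install.packages("devtools")    # needed for devtools::load_all() below
```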

After that, the user can run the code below to initialize Rdemeter. Note that the user will need to have the `reticulate` R
package installed.

```R

devtools::load_all(".")
Rdemeter <- port_model_to_R(model_name = "demeter",
                            path_to_python = "where python for demeter is installed")
```

After this, the user can access any demeter function through the `Rdemeter` object using `Rdemeter$function_name`.
The user can also use the convenience function below to pass a demeter config file from R:

```R

run_demeter_R(path_to_inputs = "add path to config here",
              write_outputs = TRUE)

```
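
For instance, once the model object has been ported with `port_model_to_R`, the same run can also be launched directly through the imported module; the sketch below uses a placeholder config path.

```R

# equivalent call through the ported object; run_model is the demeter entry
# point used by run_demeter_R above (the config path is a placeholder)
Rdemeter$run_model(config_file = "path/to/config_gcam_reference.ini",
                   write_outputs = TRUE)
```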





20 changes: 20 additions & 0 deletions R_interface/Rdemeter.Rproj
@@ -0,0 +1,20 @@
Version: 1.0

RestoreWorkspace: Default
SaveWorkspace: Default
AlwaysSaveHistory: Default

EnableCodeIndexing: Yes
UseSpacesForTab: Yes
NumSpacesForTab: 2
Encoding: UTF-8

RnwWeave: Sweave
LaTeX: pdfLaTeX

AutoAppendNewline: Yes
StripTrailingWhitespace: Yes

BuildType: Package
PackageUseDevtools: Yes
PackageInstallArgs: --no-multiarch --with-keep.source
87 changes: 53 additions & 34 deletions demeter/change/expansion.py
@@ -11,6 +11,54 @@
import numpy as np
import os
from scipy import stats
import threading
from multiprocessing.dummy import Pool as ThreadPool
from itertools import repeat

def extense_parallel_helper(regix_metix, log, c, allregnumber, allregmet, spat_ludataharm, spat_region, spat_met,
                            kernel_vector, cons_data, order_rules, final_landclasses, constraint_rules,
                            transition_rules, land_mismatch, spat_ludataharm_orig_steps, target_change, yr, diag_file):
    """Run the expansion calculation for a single (region, metric) index pair; mapped over a thread pool."""

    reg_idx, met_idx = regix_metix

    # retrieve region and metric numbers for this index pair
    regnumber = allregnumber[reg_idx]
    metnumber = allregmet[reg_idx][met_idx]
    metnum_idx = metnumber - 1

    # create data subset for this region and metric
    reg_met_mask = (spat_region == regnumber) & (spat_met == metnumber)
    spat_ludataharm_sub = spat_ludataharm[reg_met_mask]
    kernel_vector_sub = kernel_vector[reg_met_mask]
    cons_data_sub = cons_data[reg_met_mask]

    # calculate expansion for each PFT
    exp = _expansion(c.diagnostic, diag_file, spat_ludataharm_sub, kernel_vector_sub, cons_data_sub, reg_idx,
                     metnum_idx, order_rules, final_landclasses, c.errortol, constraint_rules, transition_rules,
                     c.stochastic_expansion, c.selection_threshold, land_mismatch, target_change)

    # apply expansion and update transitions
    spat_ludataharm[reg_met_mask], target_change, trans_mat = exp
    # transitions[reg_met_mask, :, :] += trans_mat

    # calculate non-achieved change
    non_chg = np.sum(abs(target_change[:, :, :])) / 2.

    if non_chg > 0:
        non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(abs(land_mismatch[:, :, :].flatten())) * 100
    else:
        non_chg_per = 0

    # log.info("Total non-achieved expansion change for time step {0}: {1} km2 ({2} %)".format(yr, non_chg, non_chg_per))


def _convert_pft(notdone, exp_target, met_idx, pft_toconv, spat_ludataharm_sub, pft, cons_data_subpft, reg,
@@ -233,41 +281,12 @@ def apply_expansion(log, c, allregnumber, allregmet, spat_ludataharm, spat_regio
# build region_idx, metric_idx iterator
regix_metix = _reg_metric_iter(allregnumber, allregmet)

for reg_idx, met_idx in regix_metix:

# get region and metric from index
regnumber = allregnumber[reg_idx]
metnumber = allregmet[reg_idx][met_idx]
metnum_idx = metnumber - 1

# create data subset
reg_met_mask = (spat_region == regnumber) & (spat_met == metnumber)
spat_ludataharm_sub = spat_ludataharm[reg_met_mask]
kernel_vector_sub = kernel_vector[reg_met_mask]
cons_data_sub = cons_data[reg_met_mask]

# calculate expansion for each PFT
exp = _expansion(c.diagnostic, diag_file, spat_ludataharm_sub, kernel_vector_sub, cons_data_sub, reg_idx,
metnum_idx, order_rules, final_landclasses, c.errortol, constraint_rules, transition_rules,
c.stochastic_expansion, c.selection_threshold, land_mismatch, target_change)

# apply expansion and update transitions
spat_ludataharm[reg_met_mask], target_change, trans_mat = exp
#transitions[reg_met_mask, :, :] += trans_mat

# calculate non-achieved change
non_chg = np.sum(abs(target_change[:, :, :])) / 2.

if non_chg > 0:
non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(abs(land_mismatch[:, :, :].flatten())) * 100
pool = ThreadPool(len(np.unique(regix_metix)))

else:
non_chg_per = 0
pool.starmap(extense_parallel_helper,zip(regix_metix,repeat(log), repeat(c), repeat(allregnumber), repeat(allregmet), repeat(spat_ludataharm), repeat(spat_region), repeat(spat_met), repeat(kernel_vector), repeat(cons_data),
repeat(order_rules), repeat(final_landclasses), repeat(constraint_rules), repeat(transition_rules), repeat(land_mismatch),
repeat(spat_ludataharm_orig_steps), repeat(target_change), repeat(yr),repeat(diag_file)))

# log.info("Total non-achieved expansion change for time step {0}: {1} km2 ({2} %)".format(yr, non_chg, non_chg_per))

# close file if diagnostic
if c.diagnostic == 1:
diag_file.close()
pool.terminate()

return [spat_ludataharm, spat_ludataharm_orig_steps, land_mismatch, cons_data, target_change]
125 changes: 72 additions & 53 deletions demeter/change/intensification.py
@@ -10,9 +10,69 @@

import os
import numpy as np

import threading
from multiprocessing.dummy import Pool as ThreadPool
from itertools import repeat
import demeter.demeter_io.writer as wdr

def intense_parallel_helper(regix_metix, spat_region, order_rules, allregnumber, allregmet, spat_ludata,
                            spat_landmatrix, gcam_landmatrix, yr_idx, d_regid_nm, target_change, spat_ludataharm,
                            spat_met, kernel_vector, cons_data, final_landclasses, spat_ludataharm_orig_steps, yr,
                            land_mismatch, constraint_rules, transition_rules, log, pass_number, c, diag_file):
    """Run the intensification calculation for a single (region, metric) index pair; mapped over a thread pool."""

    reg_idx, met_idx = regix_metix

    # build the regional summary and intensification target for this region
    regnumber, reg_idx, target_intensification = _create_summary(reg_idx, allregnumber, spat_ludata,
                                                                 spat_landmatrix, gcam_landmatrix, d_regid_nm,
                                                                 log, spat_region, yr_idx, target_change,
                                                                 pass_number, c)

    # calculate and write area diagnostic
    # diff_diagnostic(c.diag_dir, d_regid_nm, gcam_landmatrix, spat_landmatrix, reg_idx, yr, yr_idx)

    # retrieve region and metric number
    metnumber = allregmet[reg_idx][met_idx]

    # create data subset
    reg_met_mask = (spat_region == regnumber) & (spat_met == metnumber)
    spat_ludataharm_sub = spat_ludataharm[reg_met_mask]
    kernel_vector_sub = kernel_vector[reg_met_mask]
    cons_data_sub = cons_data[reg_met_mask]

    # calculate intensification
    citz = _intensification(c.diagnostic, diag_file, spat_ludataharm_sub, target_intensification, kernel_vector_sub,
                            cons_data_sub, reg_idx, metnumber, order_rules, final_landclasses, c.errortol,
                            constraint_rules, target_change, transition_rules, land_mismatch)

    # apply intensification
    spat_ludataharm[reg_met_mask], trans_mat, target_change, target_intensification = citz

    # log transition
    # transitions[reg_met_mask, :, :] += trans_mat

    # calculate non-achieved change
    non_chg = np.sum(abs(target_change[:, :, :])) / 2.0

    if non_chg > 0:
        non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(abs(land_mismatch[:, :, :].flatten())) * 100
    else:
        non_chg_per = 0

    # log.info("Total non-achieved intensification change for pass {0} time step {1}: {2} km2 ({3} %)".format(pass_number, yr, non_chg, non_chg_per))


def diff_diagnostic(diag_outdir, d_regid_nm, gcam_landmatrix, spat_landmatrix, reg, yr, yr_idx):
"""
@@ -30,6 +90,9 @@ def diff_diagnostic(diag_outdir, d_regid_nm, gcam_landmatrix, spat_landmatrix, r
wdr.array_to_csv(spat_landmatrix[reg, :, :], base_out)





def reg_metric_iter(allregnumber, allregmet):
"""
Create region, metric iterator.
@@ -289,60 +352,16 @@ def apply_intensification(log, pass_number, c, spat_region, order_rules, allregn
# build region_idx, metric_idx iterator
regix_metix = reg_metric_iter(allregnumber, allregmet)

pool = ThreadPool(len(np.unique(regix_metix)))

pool.starmap(intense_parallel_helper,zip(regix_metix,repeat(spat_region), repeat(order_rules), repeat(allregnumber), repeat(allregmet), repeat(spat_ludata),
repeat(spat_landmatrix), repeat(gcam_landmatrix), repeat(yr_idx), repeat(d_regid_nm), repeat(target_change), repeat(spat_ludataharm),
repeat(spat_met), repeat(kernel_vector), repeat(cons_data), repeat(final_landclasses),repeat(spat_ludataharm_orig_steps), repeat(yr),
repeat(land_mismatch), repeat(constraint_rules), repeat(transition_rules),repeat(log), repeat(pass_number), repeat(c),repeat(diag_file)))
# for each region
for index, pkg in enumerate(regix_metix):
#for index, pkg in enumerate(regix_metix):

# unpack index vars
reg_idx, met_idx = pkg

if index == 0:
# set previous region index to current
prev_reg = reg_idx

# update user per region change
if (index == 0) or (reg_idx != prev_reg):

# update user per region change
regnumber, prev_reg, target_intensification = _create_summary(reg_idx, allregnumber, spat_ludata,
spat_landmatrix, gcam_landmatrix, d_regid_nm,
log, spat_region, yr_idx, target_change,
pass_number, c)

# calculate and write area diagnostic
# diff_diagnostic(c.diag_dir, d_regid_nm, gcam_landmatrix, spat_landmatrix, reg_idx, yr, yr_idx)

# retrieve region and metric number
metnumber = allregmet[reg_idx][met_idx]

# create data subset
reg_met_mask = (spat_region == regnumber) & (spat_met == metnumber)
spat_ludataharm_sub = spat_ludataharm[reg_met_mask]
kernel_vector_sub = kernel_vector[reg_met_mask]
cons_data_sub = cons_data[reg_met_mask]

# calculate intensification
citz = _intensification(c.diagnostic, diag_file, spat_ludataharm_sub, target_intensification, kernel_vector_sub,
cons_data_sub, reg_idx, metnumber, order_rules, final_landclasses, c.errortol,
constraint_rules, target_change, transition_rules, land_mismatch)

# apply intensification
spat_ludataharm[reg_met_mask], trans_mat, target_change, target_intensification = citz

# log transition
#transitions[reg_met_mask, :, :] += trans_mat

# calculate non-achieved change
non_chg = np.sum(abs(target_change[:, :, :])) / 2.0

if non_chg > 0:
non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(abs(land_mismatch[:, :, :].flatten())) * 100
else:
non_chg_per = 0

# log.info("Total non-achieved intensification change for pass {0} time step {1}: {2} km2 ({3} %)".format(pass_number, yr, non_chg, non_chg_per))

# close file if diagnostic
if c.diagnostic == 1:
diag_file.close()

pool.terminate()
return [spat_ludataharm, spat_ludataharm_orig_steps, land_mismatch, cons_data, target_change]
7 changes: 7 additions & 0 deletions demeter/config_reader.py
@@ -175,6 +175,13 @@ def __init__(self, params):
self.use_constraints = self.valid_integer(run_params.get('use_constraints', 1), 'use_constraints', [0, 1])
self.spatial_resolution = self.valid_limit(run_params.get('spatial_resolution', 0.25), 'spatial_resolution', [0.0, 1000000.0], 'float')
self.regrid_resolution = self.valid_limit(run_params.get('regrid_resolution', self.spatial_resolution), 'regrid_resolution', [0.0, 1000000.0], 'float')

self.external_scenario_PFT_name = run_params.get('PFT_name_to_replace', 'Urban')
self.external_scenario = run_params.get('ext_scenario', 'SSP1')
self.stitch_external = self.valid_integer(run_params.get('stitch_external', 0), 'stitch_external', [1, 0])
self.path_to_external = run_params.get('path_to_external', self.input_dir)

self.errortol = self.valid_limit(run_params.get('errortol', 0.001), 'errortol', [0.0, 1000000.0], 'float')
self.timestep = self.valid_limit(run_params.get('timestep', 5), 'timestep', [1, 1000000], 'int')
self.proj_factor = self.valid_limit(run_params.get('proj_factor', 1000), 'proj_factor', [1, 10000000000], 'int')
