From ffaabbc3704faabfbaeb0dcbde3ec1d8e3016db3 Mon Sep 17 00:00:00 2001 From: Mukul Maheshwari Date: Sun, 5 Jul 2020 20:17:48 +0530 Subject: [PATCH] Squashed commit of the following: commit 671027d70d8c04fb74d0c4f1aec5c6a8aa16b288 Merge: 72c5742d5 e11e42782 Author: Mukul Maheshwari Date: Sun Jul 5 16:48:35 2020 +0530 Merge branch 'develop' into book-build-on-actions updated develop with book actions commit 72c5742d5b504018b03ec42d344ebac0ef5c1479 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sun Jul 5 14:47:53 2020 +0530 created ci.yml commit e11e42782b18d537c55f1863cf8b3bf08253a8ec Merge: bea3f00a6 4f1324a30 Author: Rob Kooper Date: Fri Jul 3 17:47:45 2020 -0500 Merge pull request #2649 from dlebauer/dev_intro2 Minor changes to DEV-INTRO.md commit 4f1324a30bfe01e4935661d84cc86a98ad21921c Merge: bfbdd5aff bea3f00a6 Author: David LeBauer Date: Fri Jul 3 13:54:53 2020 -0700 Merge branch 'develop' into dev_intro2 commit bea3f00a6cf172d410f7ea88c3449d992d0ee031 Merge: b689d08ed e04d204e8 Author: David LeBauer Date: Fri Jul 3 13:53:41 2020 -0700 Merge pull request #2648 from KristinaRiemer/update_ed2in_name Update ED2IN v 2.2.0 name commit bfbdd5aff7f56949d2c991a9bc14153a2030312f Author: David LeBauer Date: Fri Jul 3 13:52:06 2020 -0700 Update DEV-INTRO.md commit 856a7e45a32d26ad5bca982f30968eb9b0e051ae Author: David LeBauer Date: Thu Jul 2 17:32:09 2020 -0700 Update DEV-INTRO.md commit e04d204e8545c5636a37f993ef32c90480186a80 Author: Kristina Riemer Date: Thu Jul 2 12:48:22 2020 -0700 Update ED2IN v 2.2.0 name commit b689d08ed6dcc325c187c37dabeb6fd77f6032dc Merge: 16fbbb3c8 766922b3e Author: Michael Dietze Date: Tue Jun 30 09:32:09 2020 -0400 Merge pull request #2645 from ayushprd/geesmap GEE - SMAP script commit 766922b3e7d7ed4d047e3aee2baf346432675e5a Author: Ayush Prasad Date: Tue Jun 30 16:56:03 2020 +0530 added gee2pecan_smap() commit 16fbbb3c8d20e429552e5d8aa0b88227338f7d65 Merge: 092a43e10 4c7a6e7b8 Author: Michael Dietze Date: 
Mon Jun 29 15:28:59 2020 -0400 Merge pull request #2610 from robkooper/docker-develop how to develop using docker commit 4c7a6e7b8fd41fb4de1952cc86e9ffb6cf2172a5 Merge: 1e43abe79 092a43e10 Author: Rob Kooper Date: Mon Jun 29 13:18:21 2020 -0500 Merge branch 'develop' into docker-develop commit 1e43abe79fbdfc9b8bf90b0b479093a73e35b4c9 Author: Rob Kooper Date: Mon Jun 29 12:23:44 2020 -0500 missing quote commit d9cde51c408eee225b2e0b8728e3acec07190620 Author: Rob Kooper Date: Mon Jun 29 08:59:40 2020 -0500 more explicit about commands for windows/linux/mac commit 092a43e10b7a8e3184742311b867f08c613aa4ae Merge: d1a30bef4 88a487526 Author: Michael Dietze Date: Sun Jun 28 18:13:20 2020 -0400 Merge pull request #2496 from PecanProject/biocro_annual_grass no carbon storage across years for annual grasses in BioCro commit 88a487526f3d79b8ebfb66beeac3d5e1fd4584ef Merge: a868addb4 d1a30bef4 Author: Michael Dietze Date: Sun Jun 28 17:14:26 2020 -0400 Merge branch 'develop' into biocro_annual_grass commit d1a30bef47bdbb370453ae4fdbdec195a0470945 Merge: 46f60f9b9 dcf27a1e9 Author: Michael Dietze Date: Sun Jun 28 17:14:12 2020 -0400 Merge pull request #2643 from infotroph/bookdown-workaround Fix for failing documentation build commit a868addb471e5188c370f97c64d93c551f2e893c Author: runner Date: Sun Jun 28 09:29:08 2020 +0000 automated syle update commit 07ec40c5e2135df64aec733c2002ffd7723eda53 Merge: ad7e98384 46f60f9b9 Author: Chris Black Date: Sun Jun 28 11:22:11 2020 +0200 Merge branch 'develop' into biocro_annual_grass commit dcf27a1e9cca8495e25be2fbcbca6a0f26747265 Author: Chris Black Date: Sun Jun 28 09:58:51 2020 +0200 work around "Input files not all in same directory, please supply explicit wd" from bookdown 0.20 commit 46f60f9b9a2016864f557c2f1b24a166c09af1ec Merge: 013bc2059 c62d28330 Author: Michael Dietze Date: Sat Jun 27 22:29:07 2020 -0400 Merge pull request #2631 from tezansahu/api_1 API endpoints for ping, models, workflows & runs (with authentication) commit 
c62d2833009cf485dd8b32795107ec545c40f167 Merge: c18023bc8 88188abd6 Author: Tezan Sahu Date: Sat Jun 27 16:34:55 2020 -0500 Merge branch 'api_1' of github.com:tezansahu/pecan into api_1 commit c18023bc88f2865da30ab5f653773a5ed7afdccc Merge: 71e6fb72f 013bc2059 Author: Tezan Sahu Date: Sat Jun 27 16:33:55 2020 -0500 Merge branch 'develop' of https://github.com/PecanProject/pecan into api_1 commit 71e6fb72f6c23f5a4cce8528b6f558131b2a0670 Author: Tezan Sahu Date: Sat Jun 27 21:33:39 2020 +0000 updated swagger docs link in documentation & readme commit 88188abd6d2db53ce83b35f8290a080926993806 Merge: acf68510b 013bc2059 Author: Michael Dietze Date: Sat Jun 27 17:33:18 2020 -0400 Merge branch 'develop' into api_1 commit 013bc2059784942a80d392d04205621410778049 Merge: d30d5c202 b7de3a672 Author: Michael Dietze Date: Sat Jun 27 17:23:27 2020 -0400 Merge pull request #2569 from rahul799/styler-workflow Styler workflow commit b7de3a672bed0e3901488c0f3ef5161e976b9db2 Author: Chris Black Date: Sat Jun 27 22:39:18 2020 +0200 Update .github/workflows/styler-actions.yml commit acf68510bd6d5688b0cd0cb1e0717dd4ef1d0f48 Author: Tezan Sahu Date: Sat Jun 27 19:42:53 2020 +0000 updated docs with examples commit c4b1bd17f80eb7a09aa5a563b5b6359bc25c8e65 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Sat Jun 27 22:37:23 2020 +0530 Update .github/workflows/styler-actions.yml Co-authored-by: Chris Black commit 5f7617045ece7546ae751995e2044a489adfda83 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Sat Jun 27 22:37:10 2020 +0530 Update .github/workflows/styler-actions.yml Co-authored-by: Chris Black commit 3464bffad024cda5f6baeb4c8e8129fb3195646b Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Sat Jun 27 22:36:55 2020 +0530 Update .github/workflows/styler-actions.yml Co-authored-by: Chris Black commit 932f931bb5a8ae78cba2f1a69ca474ef99d8a15e Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> 
Date: Sat Jun 27 18:37:40 2020 +0530 updated bookdown bug commit 63e45d0d65dcdf53fa5f5b1b74086b7fc25a1afc Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 17:15:31 2020 +0530 Update book.yml commit 10f9b15dc2083036de6d4017cdf4d9eb560554f2 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 17:00:26 2020 +0530 Update book.yml commit cf24ccb05732d155aa4b301c76725dd0eee7ee6f Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 16:43:19 2020 +0530 Update book.yml commit 90c985220e60018d53aa3551f8f8f3663c7e9eb7 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 16:37:03 2020 +0530 Update book.yml commit 56a97a83707d330c27feb0543d50906b1edc4701 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 16:01:35 2020 +0530 Update book.yml commit b59e33d0f32418201e68222abdc0682376a0af11 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 13:34:50 2020 +0530 Update book.yml commit 84bef4753ded00715c04805842d103e1dd4870b1 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 12:53:46 2020 +0530 Update book.yml commit ef3ceadeccb29ce7eebb34190033f45273393fe1 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 12:47:10 2020 +0530 Update book.yml commit 691b82346762327ea338edd9d8770baeda86361b Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 12:41:29 2020 +0530 Update book.yml commit 89c51eaaafb2aa3c0a717d371ee5f3933150504a Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 27 11:52:19 2020 +0530 updated tags field commit 8626151d3374afc5a36a34cca70c42ada177a76a Merge: 17a00e23d d30d5c202 Author: Chris Black Date: Fri Jun 26 21:09:54 2020 +0200 Merge branch 'develop' into 
styler-workflow commit d107248adbb55e5795f025820d551e3bf843ef31 Merge: b0e2f3b93 61829fe3d Author: David LeBauer Date: Thu Jun 25 14:44:08 2020 -0700 Merge branch 'docker-develop' of https://github.com/robkooper/pecan into pr/2610 commit b0e2f3b935da3aa716a69bbce8ab07bbf887f9d2 Author: David LeBauer Date: Thu Jun 25 14:44:02 2020 -0700 Update DEV-INTRO.md commit b3b811fd6ddfed2adeaee145010fc0c9f42c9320 Author: Tezan Sahu Date: Thu Jun 25 17:58:29 2020 +0000 api folder restructuring commit 15851f87701874d196e5454ec6e052442bcb3b11 Merge: 3ca09483f d30d5c202 Author: Tezan Sahu Date: Tue Jun 23 22:17:04 2020 -0500 Merge branch 'develop' of https://github.com/PecanProject/pecan into api_1 commit 3ca09483fc575553d5ce053547f652d534aec823 Author: Tezan Sahu Date: Tue Jun 23 18:37:05 2020 +0000 API documentation for GET endpoints involving models, workflows, runs & general host details commit d30d5c2026858ed55a931303f25110b99dbab2e4 Merge: d7cbc6a68 a84d5b1fd Author: Michael Dietze Date: Mon Jun 22 10:11:11 2020 -0400 Merge pull request #2641 from infotroph/met-process-fix-con Pass correct object to query.format.vars commit 581b4b2c96a2f71b57cff1208a591cd8d7f45d90 Author: Tezan Sahu Date: Mon Jun 22 12:29:28 2020 +0000 docker setup for api commit a84d5b1fda2d37494b4f12d223347943a470f110 Author: Chris Black Date: Sat Jun 20 21:17:52 2020 +0200 src_postgres is deprecated commit 32e95940cbd85d8ccf4efb78774695ae2bd3b366 Author: Chris Black Date: Fri Jun 19 22:40:16 2020 +0200 pass connection not list (plus misc roxygen fixes) commit d7cbc6a68f4ee66489348c1f2646fe390c94ec86 Merge: b1c3f939b be7098f04 Author: Michael Dietze Date: Fri Jun 19 08:11:40 2020 -0400 Merge pull request #2637 from ayushprd/geelai LAI script for remote data module commit be7098f045c1c650b2c8caa0133cf696b917358e Author: Ayush Prasad Date: Fri Jun 19 16:38:14 2020 +0530 unused import commit 3cf8cd7fc7e0c34e839235bad44e49c1055f60c9 Author: Ayush Prasad Date: Fri Jun 19 16:28:15 2020 +0530 added 
remote_process v1 commit ac23b024e0b5f158ca507b91b7b3c0168d440170 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 18 23:51:46 2020 +0530 Update book.yml commit f4a360617f77bcccc05fa96ff0a3a348d69c82a5 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 18 23:44:46 2020 +0530 updated auth and build jobs commit 81bd7a357c804460d27c2be7501312f10bff645f Author: Tezan Sahu Date: Thu Jun 18 15:35:44 2020 +0000 minor bugfix in dbHostInfo commit 8d7e28f26d08b96a09de0282e500a843257bb836 Author: Tezan Sahu Date: Thu Jun 18 14:59:03 2020 +0000 status endpoint uses env vars commit 24e2184b59423eb8bc0d9c179ce15a8aa3b17681 Author: Tezan Sahu Date: Thu Jun 18 13:26:51 2020 +0000 added status endpoint & modified dbHostInfo() commit 3ee0d5a88bda65adb619c090d059e2841bc55c30 Author: Ayush Prasad Date: Thu Jun 18 11:27:51 2020 +0530 added bands2ndvi commit 390d1839b1443c06fb32d64dd06c110780301c03 Author: Ayush Prasad Date: Thu Jun 18 11:26:25 2020 +0530 added bands2lai_snap commit 22c5ce12bd6772a48ec7a9e074736b40cc569b33 Author: Ayush Prasad Date: Thu Jun 18 11:24:55 2020 +0530 added gee2pecan_bands commit e15bf8de2e038af9459b409bbd13a7cb41bd4717 Author: Ayush Prasad Date: Thu Jun 18 11:22:27 2020 +0530 removed GPP and other unused functions commit 1921baa7797fd5cd007234ba693a9bcc1b94ff28 Author: Ayush Prasad Date: Tue Jun 16 17:35:39 2020 +0530 script to calculate LAI from gee commit c5d7169f0506df54287866ec65af443f7b7abe46 Author: Ayush Prasad Date: Tue Jun 16 17:34:26 2020 +0530 added SNAP biophyscial processor Co-authored-by: Olli Nevalainen commit b1c3f939bc38338a4e23501796ab04762907e806 Author: Ayush Prasad Date: Tue Jun 16 16:44:12 2020 +0530 Sentinel 2 NDVI script for remote data module (#2634) * added satellitetools * added s2ndvi * added test file * removed satellitetool's old functions * Apply suggestions from code review (fixes in docs) Co-authored-by: istfer * added example run and 
dependencies required * moved test file to satellitetools * updated docs * minor fixes * removed pycache Co-authored-by: istfer Co-authored-by: Michael Dietze commit cdc18ca2d995f94a8888a837a659b9a4a067234f Merge: 4467cf15f 630bd5857 Author: Michael Dietze Date: Mon Jun 15 15:49:00 2020 -0400 Merge pull request #2636 from PecanProject/model2netcdf_version_detection Improve checking for output variable names in model2necdf.ED2 commit 630bd5857a52dd6284eeb1f28c8f4401846529db Merge: d098bc36b 9b4b2f01d Author: mccabete Date: Mon Jun 15 13:13:58 2020 -0400 Merge branch 'model2netcdf_version_detection' of https://github.com/pecanproject/pecan into model2netcdf_version_detection commit d098bc36bc173bda507c89e1c44437427848b475 Author: mccabete Date: Mon Jun 15 13:12:46 2020 -0400 fix if statments so warning will be triggered commit 9b4b2f01d4b42a7b51ae987dba6f75ee78744477 Author: Tess McCabe Date: Mon Jun 15 10:14:56 2020 -0400 Update CHANGELOG.md commit 3a24ca1121bfc7775dafe9ad2c6465321c6e5ad4 Author: mccabete Date: Mon Jun 15 09:29:50 2020 -0400 Improve checking for output variable names commit c3d9944f7cf9681bdc58f6d269c09d333da8f98b Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 15 16:26:22 2020 +0530 Update book.yml commit 907d9e48be811299359471c3b865d923120bd1ad Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 15 16:19:39 2020 +0530 Update book.yml commit 6ff92f0d33bc4cb13d76a99a1d3ffd62ea3611b4 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 15 15:01:45 2020 +0530 Update book.yml commit 3f3f93f204354ec70ae053ea8a6aad67609c70f7 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 15 14:55:45 2020 +0530 Update book.yml commit 64c623fd29195458c197d43a0dbf0c543d725f92 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sun Jun 14 23:53:14 2020 +0530 Update book.yml commit 
299c240277d616463979cdbe09ef57731b8167fc Author: Tezan Sahu Date: Sun Jun 14 03:57:33 2020 -0500 converted all betydb calls to dplyr commit bd912a6d8a400d67efbe2921f5c7939a21816502 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sun Jun 14 11:52:26 2020 +0530 Update book.yml commit 44b39c6708b4d85bc88ab687890a50afbd21444e Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sun Jun 14 11:46:00 2020 +0530 Update book.yml commit 6d33db32b6ef22d03dd686d140993cd7841b6c46 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 23:34:42 2020 +0530 Update book.yml commit 243471dc18df35a24cb67d24e2a055a92b56a902 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 23:20:02 2020 +0530 Update book.yml commit c46047145e3e6461cca1be264e8af9f4b0778157 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 23:16:24 2020 +0530 Update book.yml commit aab4efe080e12dcb5021da617ef9bd8f36628678 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 22:52:57 2020 +0530 Update book.yml commit b501fbca8215d278036b487da20ffe6a6de87ecc Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 22:49:16 2020 +0530 updated book.yml commit 40ad66148753a6d78baf7b8872ad754f551c804d Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 17:58:53 2020 +0530 Update book.yml commit 3247d4f03bb22c30de774e1d0a2422838e161d61 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 17:19:33 2020 +0530 Update book.yml commit e243af6547826db8c7d7e158c910187e5d5b73e0 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 17:14:05 2020 +0530 Update book.yml commit 6f8b0b5ac428180a571d33cfa425e971ffe6cd9a Author: MukulMaheshwari 
<31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 13 16:52:35 2020 +0530 Update book.yml commit 6bcb0f2435cc51b6b9bd97c054e2081b78ce3518 Author: Tezan Sahu Date: Sat Jun 13 00:05:05 2020 -0500 replaces bety connection pipeline with commit 61829fe3ddeac59f771ab1a26e5382ce00b33001 Merge: e95c7c144 4467cf15f Author: Rob Kooper Date: Fri Jun 12 23:06:09 2020 -0500 Merge remote-tracking branch 'upstream/develop' into docker-develop commit eace0cebd7f3ba7d0815d83924388bbab200a1f0 Merge: 154d961d2 4467cf15f Author: Tezan Sahu <31898274+tezansahu@users.noreply.github.com> Date: Sat Jun 13 09:20:28 2020 +0530 Merge branch 'develop' into api_1 commit 4467cf15f967ef6426ac226c8e058e2b1401eefd Merge: d0648000c d6cdebe9f Author: Rob Kooper Date: Thu Jun 11 18:02:05 2020 -0500 Merge pull request #2632 from robkooper/betyconnect-env-vars use get_postgres_envvars in betyConnect commit d6cdebe9fad9666700751fd4785b8a699498f77f Merge: ce51edd78 d0648000c Author: Rob Kooper Date: Thu Jun 11 09:27:56 2020 -0500 Merge branch 'develop' into betyconnect-env-vars commit ce51edd782c6100eee13253472dfda1462b727bd Author: Rob Kooper Date: Thu Jun 11 09:27:47 2020 -0500 Update CHANGELOG.md Co-authored-by: Chris Black commit 94c2b2cd2c0a426cdd44cda3e2202af729168c34 Author: Rob Kooper Date: Thu Jun 11 09:27:36 2020 -0500 Update base/db/R/query.dplyr.R Co-authored-by: Chris Black commit d0648000c59e5367d9c5e9cec0aaf4194da0ee62 Merge: 5ee572b2e 55339f32b Author: Michael Dietze Date: Thu Jun 11 08:06:26 2020 -0400 Merge pull request #2617 from tezansahu/develop updated steps used for installing pecan using docker commit 7a9f75c040fb7bbcde3efc9eaf0890138eb58bb2 Merge: 7877d59a0 5ee572b2e Author: Rob Kooper Date: Wed Jun 10 20:55:55 2020 -0500 Merge branch 'develop' into betyconnect-env-vars commit 7877d59a0b0be873f8525ce4af7c846ebfb312b5 Author: Rob Kooper Date: Wed Jun 10 18:51:29 2020 -0500 use get_postgres_envvars in betyConnect commit 2b0c16831beb215104823f028af244bf1519a695 
Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Wed Jun 10 23:36:27 2020 +0530 Update book.yml commit 154d961d23bce1b844ae0639efce0dd50c83bb9a Merge: aac4d2adf 5ee572b2e Author: Tezan Sahu <31898274+tezansahu@users.noreply.github.com> Date: Wed Jun 10 22:01:41 2020 +0530 Merge branch 'develop' into api_1 commit aac4d2adf7f7632d09a4a973cf61dc6221ab828a Author: Tezan Sahu Date: Tue Jun 9 16:51:32 2020 +0000 added endpoints for runs commit 55339f32b2bac821bf9eabae1ae942417214e021 Merge: 193e90283 5ee572b2e Author: Michael Dietze Date: Tue Jun 9 09:13:01 2020 -0400 Merge branch 'develop' into develop commit 5ee572b2ee5297d952fdaab900f418996c739ea4 Merge: 654539f98 691e59970 Author: Michael Dietze Date: Tue Jun 9 09:12:07 2020 -0400 Merge pull request #2622 from istfer/replace_tmvtnorm Replace tmvtnorm commit 306024dea93dde365558cf34b1f9a9ab1709301e Author: Tezan Sahu Date: Tue Jun 9 02:57:04 2020 +0000 documentation bugfixes commit 196e9c36c8e87e08103f4f5af98f495fe1984c87 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 9 00:43:40 2020 +0530 Update book.yml commit 8d26b00792a0b93ecb48b667b4643ba0fcc2dbb6 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 9 00:40:28 2020 +0530 Update book.yml commit 245ef87b79fca2755d5e3938851dc1970b6cf691 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 9 00:36:43 2020 +0530 Update book.yml commit e4723ba1cabdfff1e7d9034426e50601106adb73 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 9 00:12:35 2020 +0530 Update book.yml commit 2c045e5b1f593548dbf11a45ad22b57d3ce8d07a Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 8 23:50:15 2020 +0530 updated remote repo commit 9e87b067b3f31cda6ff7d366815e5503f4303873 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 8 
22:17:16 2020 +0530 updated repo token commit d23b369757016f543ed6ce4caab0004040ab5918 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 8 19:19:09 2020 +0530 Update book.yml commit 691e599704728db2ed173d6478245240c3a602c2 Merge: 73eb2990c 654539f98 Author: Michael Dietze Date: Mon Jun 8 09:46:16 2020 -0400 Merge branch 'develop' into replace_tmvtnorm commit 654539f98388fcba9dbc16fba56df531cdeef1e5 Merge: 10239e62f c7eef5ac6 Author: Michael Dietze Date: Mon Jun 8 09:45:14 2020 -0400 Merge pull request #2630 from ayushprd/develop changed ic.process to ic_process commit 436104dc32ee67689362020d72f72f4854201a19 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Mon Jun 8 18:18:18 2020 +0530 Update book.yml commit c7eef5ac60d8cd00b30bdb4aca3ced33ad6b9d1c Merge: dcbdc5cba 10239e62f Author: Michael Dietze Date: Mon Jun 8 07:02:29 2020 -0400 Merge branch 'develop' into develop commit dcbdc5cba8991821e2ad766e57a8a570923f4584 Merge: 301a15e78 050b4c1cf Author: Ayush Prasad Date: Mon Jun 8 15:32:15 2020 +0530 Merge pull request #1 from ayushprd/ic-doc-changes changed ic.process to ic_process commit 050b4c1cf0503bf28b625de98254917fd5a8040c Author: Ayush Prasad Date: Mon Jun 8 15:27:51 2020 +0530 changed ic.process to ic_process commit f32b2dda9983ce05a2e87ea5f57c3fc55643c9cc Author: Tezan Sahu Date: Sun Jun 7 14:50:06 2020 +0000 added pagination for workflows endpoint commit a06c1149b81cddb51a6713ef302f3b7cce782bc8 Author: Tezan Sahu Date: Sun Jun 7 12:01:46 2020 +0000 added endpoint to get details of workflow by id commit 182ff64cf4ba5af84cf62424adbf0c9b5eb6065a Author: Tezan Sahu Date: Sun Jun 7 10:56:08 2020 +0000 added api to filter workflow by model/site id [w/o pagination] commit e69454e9674c863db4bce486521223690c744a72 Author: Tezan Sahu Date: Sun Jun 7 10:55:24 2020 +0000 db connections closed commit 0ba03e0dd16069cec41c26aa0a8958d3a342876c Author: Tezan Sahu Date: Sun Jun 7 05:33:52 2020 +0000 
removed importing of dependencies commit e1736978c0218692033a7d7bacb9a7bdfb752af6 Author: Tezan Sahu Date: Sat Jun 6 15:18:25 2020 -0500 directory refactoring commit 7e9a2a3bb0c935308008232e29f5f43f384e955c Author: Tezan Sahu Date: Sat Jun 6 14:51:56 2020 -0500 added ping & model api, authentication & swagger yaml commit f87b57f1aaebb8859ceec0445a870308a112633b Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 6 13:05:06 2020 +0530 updated git repo changes commit 36d8119ba49efe567c45729a95a9ac7d96acac2a Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 6 12:57:09 2020 +0530 Update book.yml commit a02c721cbd6e788a1783f3935d2245a1dd48a62f Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 6 12:46:03 2020 +0530 Update book.yml commit e35ed3c16163b10d1df94bc1f021e66b351a2de8 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 6 12:41:36 2020 +0530 Update book.yml commit 1d942df692e86465d1b7c667c5eb638a50096fe7 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Sat Jun 6 12:35:23 2020 +0530 updated shell version commit 94a0ba99004cad1f5603e26146f2fb80fc1aa18e Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 4 20:04:40 2020 +0530 updated container image commit 3429b9aaa886319463550abba1b4cd430d5e18c6 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 4 18:13:32 2020 +0530 updated container image commit aef80825ffb902b3c06a064b1e5ac7bb2e6aad81 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 4 14:46:26 2020 +0530 changes commit 9ae6b777cf94bea414d945ddc3fa8bc5d0abe6e6 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 4 14:42:40 2020 +0530 updated dependencies commit fc1c1eab5a9868f0a6120c39d81335efe190a106 Author: MukulMaheshwari 
<31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 4 14:35:07 2020 +0530 updated dependencies commit 030462172ab7223406ea7f3ddf38d831b8ddf171 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 4 14:15:37 2020 +0530 updated image commit a417de0874eb63746e478725f2e5e7d5b9d85052 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Thu Jun 4 14:09:51 2020 +0530 updated container image commit 73eb2990c6082ea36513df0f0c73f993a75b590c Merge: b829beebf 10239e62f Author: istfer Date: Thu Jun 4 03:49:41 2020 -0400 Merge branch 'develop' of github.com:PecanProject/pecan into replace_tmvtnorm commit 193e9028349bdcee27eb01f7272c5016aaa570ae Merge: 7dc0ffe1f 10239e62f Author: David LeBauer Date: Wed Jun 3 17:31:22 2020 -0700 Merge branch 'develop' into develop commit e95c7c144954a0ad445b95f48634becc2c2466e6 Merge: bc252328f 10239e62f Author: David LeBauer Date: Wed Jun 3 17:29:08 2020 -0700 Merge branch 'develop' into docker-develop commit 10239e62f28130508ea99f3d1e679ce0af49ef01 Merge: d39e83ca1 6562b58f4 Author: David LeBauer Date: Wed Jun 3 17:28:31 2020 -0700 Merge pull request #2625 from dlebauer/develop don't override random.effects = TRUE commit 6562b58f4ee8baa90bd0c4f2af566a828f9d6ac7 Author: David LeBauer Date: Wed Jun 3 15:09:15 2020 -0700 Update modules/meta.analysis/R/meta.analysis.R commit bc252328f8a13755ea0f7ee69d6496bf90faadbe Author: Rob Kooper Date: Wed Jun 3 15:47:53 2020 -0500 updates to doc and speedups commit ad7e98384da3f519b1fa3437e52dd174a508dcf0 Merge: dc27dd82d d39e83ca1 Author: David LeBauer Date: Wed Jun 3 13:00:59 2020 -0700 Merge branch 'develop' into biocro_annual_grass commit 5fe037daeafe1de1b0c51c74294268b01c236a67 Author: David LeBauer Date: Wed Jun 3 13:00:32 2020 -0700 Update CHANGELOG.md commit 9521887a66a147be24879839da1abfd7e6ebb59c Author: David LeBauer Date: Wed Jun 3 12:48:27 2020 -0700 don't override random.effects = TRUE commit 
b829beebf15322bb2bed83a5111f8a1c022eef92 Author: istfer Date: Tue Jun 2 13:33:40 2020 -0400 add issue no commit e1b2d53eb702ce0f080f60c495999ae7973b4d3f Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 22:47:28 2020 +0530 added changes commit 9249e462dae827b5559fca57de81213aa59cf876 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 22:44:41 2020 +0530 updated container image commit b813fea99a5dd037df063396f61ad340bff00891 Author: istfer Date: Tue Jun 2 11:13:39 2020 -0400 update changelog commit 902fdbfae3e456bd74bb3c3a0a941a19f34a7836 Author: istfer Date: Tue Jun 2 11:11:34 2020 -0400 replace function calls commit 823dcb3c40bd74729565e424266f94362e69bfbe Author: istfer Date: Tue Jun 2 11:04:50 2020 -0400 replace package commit 27dda34d03d3b22ca3369e1e6cfcf61715312264 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 20:12:45 2020 +0530 updated image name commit 2c60a175aadf652681dc66c530562e637dcb4eae Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 20:08:46 2020 +0530 update container image commit 1c970839bd1dcd188ce646b3d3aea3d030d6686c Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 19:18:47 2020 +0530 updated image name commit d19a4a5f3313c1598ef67872756fff760f9954e0 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 19:08:35 2020 +0530 updated book.yml commit f09cbb36ab7a19b5efcebf4d07d1a5283753ea18 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 19:06:41 2020 +0530 added more updates commit 4332a418597ea9ab232c609b776d05d6f48bc0e8 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 17:17:41 2020 +0530 adeed dependencies commit b6ecfd3547336f87e7c0183128d75297e77dc60d Author: MukulMaheshwari 
<31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 17:05:43 2020 +0530 updated book.yml commit cbb09ebb0062f3169d557dbe0a323872c44d9580 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:54:55 2020 +0530 created index.Rmd commit e98615ad302debaead7e1ae58062874dca41963b Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:52:07 2020 +0530 Delete libudunits2-dev commit e107fb75eb1bbbea18208708f2f30ae41b6f1b24 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:51:57 2020 +0530 Delete librdf0-dev commit c4e959e5247a0c4fb4fa4a4e8d764bd377ba8396 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:51:43 2020 +0530 Delete libnetcdf-dev commit c6454359fe48520ff4b3a6629b41fb98a826d783 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:51:32 2020 +0530 Delete libglu1-mesa-dev commit 10bb55b75a0a295f60d068c4c993243c2fba77ce Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:51:19 2020 +0530 Delete libglpk-dev commit 33249a12bf69249222ed6fd447219cc5ecd81aed Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:51:05 2020 +0530 Delete libgl1-mesa-dev commit eb3ef7f3626f6c419248bca2f6f9d95e36cbcf60 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:50:47 2020 +0530 Delete index.Rmd commit e7382d422de471e1058c012cf4b771b1c9f43575 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:50:15 2020 +0530 Delete _main.Rmd commit 8680132b465fd709469a080fb6ecea98b8c570a9 Author: MukulMaheshwari <31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:43:22 2020 +0530 Delete ci.yml commit 98ce000961c48e7f453c64eaf34aa482ca525a0f Author: MukulMaheshwari 
<31155543+MukulMaheshwari@users.noreply.github.com> Date: Tue Jun 2 16:33:33 2020 +0530 create book.yml commit 0fea2c0b2713cde58e8ce5d3992387a4bb6082d3 Author: Mukul Maheshwari Date: Tue Jun 2 15:42:17 2020 +0530 new sandox repo commit 17a00e23da2d9f8f1a2c2652ccc16f2ce7e77864 Merge: 852e081f8 d39e83ca1 Author: Chris Black Date: Tue Jun 2 10:10:29 2020 +0200 Merge branch 'develop' into styler-workflow commit 852e081f8d7539e5e039ba03e207801fcc791bd2 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Tue Jun 2 10:48:46 2020 +0530 Update styler-actions.yml commit 3de66d005ce4a672ed606666c212947984cb62f8 Author: Rob Kooper Date: Fri May 29 14:24:50 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit 724664f30c249af653e1758d98adee26f65fc612 Author: Rob Kooper Date: Fri May 29 14:24:31 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit d5cb02d3518684fc59e0954ab5f5862dbffd8136 Author: Rob Kooper Date: Fri May 29 14:24:03 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit 109cd7658860bf23f08e468ab9c2ea96ff2d6e5d Author: Rob Kooper Date: Fri May 29 14:23:10 2020 -0500 Update DEV-INTRO.md Co-authored-by: Kristina Riemer commit 46e1f76a3c8ddd856f957816e2cd99d49e7731d6 Author: Rob Kooper Date: Fri May 29 14:22:51 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit 07e1a1504f5fef00f203af176aacfdff845bb285 Author: Rob Kooper Date: Fri May 29 14:21:08 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit ce642787971130de46192d787d4ae94549ec6492 Author: Tezan Sahu Date: Thu May 28 02:29:54 2020 -0500 started creating api endpoints; ping ready commit c473a3b486819fe70dd2147b92bf1cd1a7910f40 Author: Rob Kooper Date: Wed May 27 16:05:08 2020 -0500 use copy instead of rename commit 3fec6287ce493fc0073e10d82885e4dbf2b6b873 Merge: 5ea1169f4 d39e83ca1 Author: Rob Kooper Date: Wed May 27 16:01:37 2020 -0500 Merge branch 'develop' into docker-develop commit 7dc0ffe1fdcb27bbf0943b6ff84b2105b9405acf Author: tezansahu Date: Thu 
May 28 00:56:13 2020 +0530 modified link for pecan releases commit a035f3cface61fe4e5363a79a3365e6aca9aee14 Author: tezansahu Date: Wed May 27 08:36:41 2020 +0530 modified image for env file & added warning commit 5ea1169f4e9a6eb346c5800fb46df5301f994d3a Author: Rob Kooper Date: Tue May 26 18:48:37 2020 -0500 more updates commit a78cd186a0d1c94e62c43c5248a181c55945b95f Author: Tezan Sahu <31898274+tezansahu@users.noreply.github.com> Date: Tue May 26 20:55:44 2020 +0530 Update 01_install_pecan.Rmd commit 9bc38e499cdafd8e99e7dbb3167ef70f5e024a3d Merge: 3033bc93f bd12ad9d2 Author: tezansahu Date: Tue May 26 20:42:50 2020 +0530 fixes to issues in the docs commit 3033bc93fb789511935c2dbbd63dd4f439eb9d95 Author: tezansahu Date: Tue May 26 20:40:36 2020 +0530 fixes to issues in the docs commit bd12ad9d29a759ea0e68a11fcf20d59d5ed2e165 Author: Tezan Sahu <31898274+tezansahu@users.noreply.github.com> Date: Tue May 26 20:34:12 2020 +0530 Update book_source/02_demos_tutorials_workflows/01_install_pecan.Rmd Co-authored-by: istfer commit ee98276e998c7dfe8f3202dcf7f350d610a40b19 Author: tezansahu Date: Tue May 26 08:11:46 2020 +0530 updated steps used for installing pecan using docker commit faec5ac08f06add16a8f7fda95b4657b160e58d2 Merge: 14eafc215 f0771aaf6 Author: Rob Kooper Date: Sun May 24 11:26:10 2020 -0500 Merge branch 'docker-develop' of github.com:robkooper/pecan into docker-develop commit 14eafc2153d4a2988c798d4d6c069053e9e18377 Author: Rob Kooper Date: Sun May 24 11:26:02 2020 -0500 fix error message commit d9da9d1c7fac3e524cdc539735bdbf1db3744a4a Author: Rob Kooper Date: Sun May 24 11:25:24 2020 -0500 update sipnet docker test fixes #2615 commit d39e83ca1d3236cb05b101ed8092b0d57c93a3ee Merge: bdea9c74e ca8cb7ba7 Author: Rob Kooper Date: Sun May 24 10:11:46 2020 -0500 Merge pull request #2616 from istfer/fix_SA_typo Fixing small typos that casuses errors for SA runs commit ca8cb7ba7f35f58601dc0f52879f73dc91f0d39c Author: istfer Date: Sun May 24 10:37:16 2020 -0400 
remove extra commit 656d8e6f3b2878c2ff79d8e563cad33730fa471a Author: istfer Date: Sun May 24 09:22:47 2020 -0400 fix typo commit 2bb5b1a2c16584dacc03d10f3035752a6097c1aa Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Sun May 24 10:19:14 2020 +0530 Update styler-actions.yml commit f0771aaf6ca9cfc928ce65e7b25033d25a26d7b1 Author: Rob Kooper Date: Tue May 19 07:58:54 2020 -0500 Update DEV-INTRO.md Co-authored-by: Chris Black commit 1910db876a7f0689735757cb635d41f0d4812291 Author: Rob Kooper Date: Mon May 18 17:40:51 2020 -0500 fix cp command commit bdea9c74e6ce9ff639a2c2dcbc21ded14c870fb8 Merge: 948d6326f 6b22d2eb9 Author: Michael Dietze Date: Mon May 18 17:23:45 2020 -0400 Merge pull request #2605 from kyclark/ed2.dockerfile setting the default values to match docker.sh so this will build commit e627e3fc32bb0a42d4b93fcddcc90ce797d4452d Author: Rob Kooper Date: Mon May 18 15:13:28 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit 6b22d2eb96b498914aa09c53d5b41e9f0732717d Merge: da17fad70 948d6326f Author: Rob Kooper Date: Mon May 18 12:58:21 2020 -0500 Merge branch 'develop' into ed2.dockerfile commit 948d6326f1675439cf1626a8cfb163d9e28e26af Merge: faaf9cddd 691025f02 Author: Rob Kooper Date: Mon May 18 12:57:17 2020 -0500 Merge pull request #2601 from PecanProject/docker-quickstart-docs add a true docker quickstart commit 691025f028d894fe1ecd4916a87911f0c81c5b03 Merge: 6a3fe92c5 faaf9cddd Author: Rob Kooper Date: Mon May 18 11:49:13 2020 -0500 Merge branch 'develop' into docker-quickstart-docs commit 9989ab40236390ce018feb8542f7955cb312c7be Author: Rob Kooper Date: Mon May 18 11:32:48 2020 -0500 more fixes based on discussion in #2572 commit 5eeeb60f91ca4b961e1925556bffd367c1cecbdc Merge: 5ba4df45c faaf9cddd Author: Rob Kooper Date: Mon May 18 10:08:10 2020 -0500 Merge remote-tracking branch 'upstream/develop' into docker-develop commit faaf9cddddbcf9970f16d7e720884dad4a6444d0 Merge: 4e84f1ddd b34a4cb8d Author: Michael Dietze Date: 
Mon May 18 09:25:05 2020 -0400 Merge pull request #2611 from ayushprd/2520 Fixed not triggering of "All values bad" in call_MODIS commit b34a4cb8df61ef6f9cb40c5f5eceb4c990f55f5c Merge: c02c2f009 4e84f1ddd Author: Michael Dietze Date: Mon May 18 08:45:52 2020 -0400 Merge branch 'develop' into 2520 commit 4e84f1ddda484406da26c9925d061ee3f6f81333 Merge: 7f290ce99 04236fc07 Author: Michael Dietze Date: Mon May 18 08:45:27 2020 -0400 Merge pull request #2613 from istfer/fix-db-dbsync-crash add .sh commit 04236fc07023b3cd22a4f18579a5dd5f6d36d83f Author: istfer Date: Mon May 18 03:37:37 2020 -0400 add .sh commit 5ba4df45cc8fa45fbddd64673801a68c39dd80aa Merge: 236241366 6591e19f6 Author: Rob Kooper Date: Sat May 16 10:41:26 2020 -0500 Merge branch 'docker-develop' of github.com:robkooper/pecan into docker-develop commit 236241366445f44e14b889f92cec1f588cb042cd Author: Rob Kooper Date: Sat May 16 10:41:24 2020 -0500 added some text to clarify things based on feedback from @istfer commit 6591e19f61dd1693bb47f2ac83dcc326f4b51a6d Author: Rob Kooper Date: Fri May 15 09:59:59 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit 40a15e117ff662d1b8b2c34dab1798b8d5085b8e Author: Rob Kooper Date: Thu May 14 23:18:35 2020 -0500 few more tweaks commit c30e5f6dd0e71165ca1371c5e66cfcc9a54cb8ff Author: Rob Kooper Date: Thu May 14 23:12:22 2020 -0500 fixes based on demo today commit 62b900c76100dca60f2642c88f2c170be680291f Author: Rob Kooper Date: Thu May 14 16:53:12 2020 -0500 Update DEV-INTRO.md Co-authored-by: David LeBauer commit a55a4f45980245eb8560ba256a91529c2dc4cf19 Author: Rob Kooper Date: Thu May 14 16:53:02 2020 -0500 Update DEV-INTRO.md Co-authored-by: istfer commit c02c2f009b1732ef03ca8208742353300d4aeef6 Author: Ayush Prasad Date: Thu May 14 15:03:46 2020 +0530 removed !(is.null(good)) commit 01d0596546ca4c056a761f9305d7ab249eb182ac Author: Rob Kooper Date: Wed May 13 23:49:16 2020 -0500 add note about docker-compose.override.yml commit 
63d87af67b3e1250121e88a9120f16d9c6e4c094 Author: Rob Kooper Date: Wed May 13 23:33:02 2020 -0500 update CHANGELOG commit 3e3df92c3ca78234fd58fb0e36df86f77a5fe7b5 Author: Rob Kooper Date: Wed May 13 23:28:22 2020 -0500 how to develop using docker commit 7f290ce9943ce4bf23f0a3e8181d00e9fe9920db Merge: ea5a57c6f 573dfd57d Author: Rob Kooper Date: Wed May 13 17:14:27 2020 -0500 Merge pull request #2608 from robkooper/fix-db-dbsync-build do apt-get before install commit 573dfd57d6852db50b7dd6d206cbc2904eaffe8e Author: Rob Kooper Date: Wed May 13 16:31:36 2020 -0500 do apt-get before install commit ea5a57c6ff1a35d2c6d6b56fb33a231190d7908a Merge: db6b6166a 65aa79a03 Author: Michael Dietze Date: Wed May 13 10:32:53 2020 -0400 Merge pull request #2598 from PecanProject/ed2.2_ed2in Create ED2INv2.2.0 commit 65aa79a03f0f4c277b912b7034328d3a2fe73ecf Merge: ecf4faf32 db6b6166a Author: Michael Dietze Date: Wed May 13 09:08:10 2020 -0400 Merge branch 'develop' into ed2.2_ed2in commit db6b6166a7ba964e5d3563af00d22d72131fb12a Merge: 301a15e78 78b901c9b Author: Michael Dietze Date: Wed May 13 09:07:12 2020 -0400 Merge pull request #2606 from ayushprd/2519 Change order of arguments in call_MODIS() commit ecf4faf3245a0ca5b46060774c92269818ad7f6c Merge: 2eb8a1001 301a15e78 Author: David LeBauer Date: Tue May 12 23:49:44 2020 -0700 Merge branch 'develop' into ed2.2_ed2in commit 6a3fe92c5195759d0dd883da50cb3869a766e6a5 Merge: 2c8eca2d2 301a15e78 Author: David LeBauer Date: Tue May 12 23:46:41 2020 -0700 Merge branch 'develop' into docker-quickstart-docs commit 78b901c9bcffa3892a796783c05ed442a4c474de Author: Ayush Prasad Date: Mon May 11 18:39:53 2020 +0530 generated documentation for call_MODIS commit a6e7c9614bbb224a12cfe7e52413ff9f0ca5e819 Author: Ayush Prasad Date: Mon May 11 16:13:45 2020 +0530 Update CHANGELOG.md commit 40b0e519f774f6b131654ae0d47d5b70eaadfd1a Author: Ayush Prasad Date: Mon May 11 16:04:36 2020 +0530 changed order of arguments, updated docs commit 
d7eeb8b516c333911d33dc66cf1384ce8b1e3f22 Author: Ayush Prasad Date: Mon May 11 15:18:23 2020 +0530 updated CHANGELOG.md commit da17fad705eee338b2732999982637129b6a11eb Author: Ken Youens-Clark Date: Fri May 8 16:19:20 2020 -0700 setting the default values to match docker.sh so this will build commit 30b96bcdd06508edca4345b0ec7d286195b9d362 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Sat May 9 00:04:25 2020 +0530 Update styler-actions.yml commit 87066d0b3c6674776a9791a46ebc99d2179bb766 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Sat May 9 00:03:54 2020 +0530 Update .github/workflows/styler-actions.yml Co-authored-by: Chris Black commit 06340b4af02aac590ee3910a0bedf5bc90458782 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Sat May 9 00:01:34 2020 +0530 file name updated commit 3801fa22ddef3903daa204671b510cc36714c36f Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Fri May 8 06:59:26 2020 +0530 Update .github/workflows/styler-actions.yml Co-authored-by: Chris Black commit b2e2bd535b4bd00e4cc647d3340c9609f2071272 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Fri May 8 06:58:19 2020 +0530 Update .github/workflows/styler-actions.yml Co-authored-by: Chris Black commit 301a15e785a2c68b8f4c4e390a1edd6191eae463 Merge: df25a3b85 47a5368c2 Author: Rob Kooper Date: Thu May 7 17:28:22 2020 -0500 Merge pull request #2588 from robkooper/fix-edit-docker fixes not starting after editing files (fixes #2587) commit 2eb8a1001d0cbdae72507d6085ff583325abea53 Merge: 3562c6501 df25a3b85 Author: David LeBauer Date: Thu May 7 15:25:15 2020 -0700 Merge branch 'develop' into ed2.2_ed2in commit 47a5368c21fb16113e2bbfe2d1015028e7f082dd Merge: 635ec80b3 df25a3b85 Author: Rob Kooper Date: Thu May 7 15:44:23 2020 -0500 Merge branch 'develop' into fix-edit-docker commit df25a3b85593d0e0c22e2be90702dec555c49f81 Merge: 90e70d440 54508fc51 Author: Rob Kooper Date: Thu 
May 7 15:43:58 2020 -0500 Merge pull request #2590 from robkooper/fix-ftp-code ftp has status code 226 for ok commit 3562c65017f619546cbbfa659d57a68b3d6f2b3b Merge: 732ab71a6 90e70d440 Author: David LeBauer Date: Thu May 7 13:32:40 2020 -0700 Merge branch 'develop' of github.com:pecanproject/pecan into ed2.2_ed2in commit 54508fc5135290cc04e874f9dab6253f5195740e Merge: c6c311a0e 90e70d440 Author: Rob Kooper Date: Thu May 7 14:47:30 2020 -0500 Merge branch 'develop' into fix-ftp-code commit 90e70d440acd5cd16c800a243c678e420f30881e Merge: 2109a8c88 064bdaad5 Author: Rob Kooper Date: Thu May 7 14:47:07 2020 -0500 Merge pull request #2596 from rahul799/remove-git-install removing-git-install commit 064bdaad515aeb929f2a5a4bf9b3996a1a1bde96 Merge: 92334ae4a 2109a8c88 Author: Rob Kooper Date: Thu May 7 13:51:00 2020 -0500 Merge branch 'develop' into remove-git-install commit 635ec80b33f8665a20d9b5977d024575d75a780f Merge: b0da91424 2109a8c88 Author: Michael Dietze Date: Thu May 7 12:40:53 2020 -0400 Merge branch 'develop' into fix-edit-docker commit 2109a8c88d92e1f64e1b41c5d50e0e21ebbc8182 Author: David LeBauer Date: Wed May 6 17:56:36 2020 -0700 fix typos in docker.sh commit b0da91424b5553d93e88f3ecb61f81af6d1b6de4 Merge: 07ab6df92 84880c34b Author: Rob Kooper Date: Wed May 6 15:55:43 2020 -0500 Merge branch 'develop' into fix-edit-docker commit 84880c34b7247fcac878b155492fad67dec7e06b Merge: d52018ef8 d52d20565 Author: Rob Kooper Date: Wed May 6 15:55:22 2020 -0500 Merge pull request #2603 from infotroph/docker_depends_fix typo in depends Dockerfile commit 732ab71a6eab45cbbfe5751b0b2f129945d21ed2 Author: Ken Youens-Clark Date: Wed May 6 13:02:43 2020 -0700 version which might one day be compatible with ED 2.2.0 commit d52d2056568e6ebc411ea28368b0c48cf6531434 Author: Chris Black Date: Wed May 6 21:20:10 2020 +0200 == is a bashism commit 92334ae4a9ef401a4827ec936f0748e41be10a07 Merge: fa6d5502e d52018ef8 Author: Chris Black Date: Wed May 6 19:47:18 2020 +0200 Merge branch 
'develop' into remove-git-install commit 1ea2cad98f99461844da18f5e6075872df8e3577 Merge: cd8d750bf d52018ef8 Author: Rob Kooper Date: Wed May 6 11:03:56 2020 -0500 Merge branch 'develop' into ed2.2_ed2in commit cd8d750bfa227fb365c97d67ae835cf862e76948 Author: Rob Kooper Date: Wed May 6 11:03:14 2020 -0500 remove git build of ED commit 07138ae5552f72a81e43f5752579425bc1d69780 Author: David LeBauer Date: Wed May 6 08:59:45 2020 -0700 Update CHANGELOG.md commit 088bfd11d50e0218d9a2684ff7e7c9d556a04001 Author: David LeBauer Date: Wed May 6 08:58:45 2020 -0700 Rename ED2INv2.2.0 to ED2IN.2.2.0 per convention, as requested in https://github.com/PecanProject/pecan/pull/2598#pullrequestreview-406275757 commit b9ad6beaea69f5cf0be761230f7f055cf4f39162 Author: David LeBauer Date: Wed May 6 08:57:39 2020 -0700 Update CHANGELOG.md add changes re: ED2IN to changelog commit eeb22ff6738dbb12a63d73fd546ea17f5730a1b3 Author: David LeBauer Date: Wed May 6 08:54:18 2020 -0700 Delete ED2IN.git (#2599) this no longer works with the master branch of the https://github.com/EDmodel/ED2 keeping this unversioned ED2IN around will only cause trouble. 
commit bde3bc6b75f6d8a8aa169c3b05d6186b2db021ae Merge: de5c50873 2bb2d4bbb Author: David LeBauer Date: Wed May 6 08:53:56 2020 -0700 Merge branch 'develop' into ed2.2_ed2in commit fa6d5502e3e80115596aca229e79c79a9f585b87 Merge: 15e71d953 7a3365ac5 Author: Rob Kooper Date: Tue May 5 23:04:22 2020 -0500 Merge branch 'develop' into remove-git-install commit 2c8eca2d2e6c6066f4f33dc92d124fb49518bda0 Author: David LeBauer Date: Tue May 5 18:06:00 2020 -0700 add a true docker quickstart previously the docker-quickstart link pointed to this file, with the first section ``` ## The PEcAn docker install process in detail {#docker-quickstart} ``` commit de5c5087306c8127e9186f5995cb6475c77eed35 Merge: 1c4bcd9bf 9b02d18aa Author: David LeBauer Date: Tue May 5 17:29:19 2020 -0700 Merge branch 'develop' into ed2.2_ed2in commit 1c4bcd9bf004d387f34b9ee7e8bbc099459ca662 Author: David LeBauer Date: Tue May 5 17:19:42 2020 -0700 Update CHANGELOG.md commit dc1438f5784e2073c75adc2ca6b8ada73e0c232b Author: David LeBauer Date: Tue May 5 14:24:37 2020 -0700 Create ED2INv2.2.0 based on ED2IN.git still need to finish finding and replacing variables commit 15e71d9538590e517f955e0e544f5e010615fd0b Merge: 4a7cc603b 78a657fc2 Author: Rob Kooper Date: Tue May 5 13:18:00 2020 -0500 Merge branch 'develop' into remove-git-install commit 07ab6df924ad8bc37abb59134ee1eec3e845dca4 Merge: e88649f0b c719dc59e Author: Rob Kooper Date: Tue May 5 12:58:38 2020 -0500 Merge branch 'develop' into fix-edit-docker commit 4a7cc603b9e18a4a0b9ce90766e339772f3b5c4e Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Tue May 5 22:53:16 2020 +0530 removing-git-install commit 31e85a60a87513b07a67917df3e82a0ae564a56b Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Tue May 5 20:33:44 2020 +0530 Update .github/workflows/styler-actions.yml Co-authored-by: Chris Black commit d80db13539fd97694866a680f18edf37113e9413 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> 
Date: Tue May 5 18:47:02 2020 +0530 removed unwanted step commit e88649f0bd408cd3b40bdd9fe78f1050cb972027 Merge: fd856a6ba 4da2f3943 Author: Michael Dietze Date: Tue May 5 06:42:30 2020 -0400 Merge branch 'develop' into fix-edit-docker commit c6c311a0ee59e606d9fe52738595c3ace7b280e3 Author: Rob Kooper Date: Wed Apr 29 13:48:22 2020 -0500 remove debug statement commit 0aaa2714690c9e2b1d6348e66c102d82627392a8 Author: Rob Kooper Date: Wed Apr 29 13:29:00 2020 -0500 fix missing namespace commit 5560e0b4dcb45bf89ef255c8e77c72a3d1c520c7 Author: Rob Kooper Date: Wed Apr 29 12:37:49 2020 -0500 remove timeout will try servers once, if no versions.txt add to ignored list (and use gray color). This is reset on update servers. commit 613547ca5496e0cc19645f33a4f1c8c4a653e6c7 Author: Rob Kooper Date: Wed Apr 29 10:23:48 2020 -0500 ftp has status code 226 for ok commit fd856a6baca7feb8c4ce7efc9a28a61ad47b3041 Author: Rob Kooper Date: Mon Apr 27 18:42:32 2020 -0500 run workflow and stop in case of advanced, and continue commit 079d4772ceaa78bb77a939fcce7ec6d737455e47 Author: Rob Kooper Date: Mon Apr 27 16:31:05 2020 -0500 fixes not starting after editing files (fixes #2587) commit 86533c206a0fa1858747ee3340b44606a7f343a6 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Fri Apr 24 13:46:28 2020 +0530 Update styler-actions.yml commit 8c4a947396a272c8e0ffd4c9d69ea9327bb3a099 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Thu Apr 23 19:10:29 2020 +0530 Update styler-actions.yml commit f4a04ee62b3e4145d2c3048ddf50a371eb538f32 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Tue Apr 14 14:21:52 2020 +0530 Update styler-actions.yml commit 5b149af5335698a7b440bd016bd18c0a87236b66 Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Tue Apr 14 14:13:44 2020 +0530 Update styler-actions.yml commit b700de87bd0c5fcbfcb778d263111154e4f07847 Author: Rahul Agrawal 
<41531498+rahul799@users.noreply.github.com> Date: Tue Apr 14 14:01:18 2020 +0530 Update styler-actions.yml commit 0e9daf7bb7ca6300c2550c1a7f6960a9341c0b0e Author: Rahul Agrawal <41531498+rahul799@users.noreply.github.com> Date: Tue Apr 14 13:59:22 2020 +0530 Create styler-actions.yml commit dc27dd82de6812553116276a0b9e044779e8d619 Author: David LeBauer Date: Thu Dec 19 15:53:14 2019 -0700 clarifying comment commit f4d2eff335753a77769bf95e86cde50f31faebbc Author: David LeBauer Date: Thu Dec 19 15:51:27 2019 -0700 Update CHANGELOG.md commit 646c7a0e2eab5b94aacce6c8a2fda1bafeaec834 Author: David LeBauer Date: Thu Dec 19 15:49:00 2019 -0700 no carbon storage across years for annual grasses When there are multiple years, always restart annual plants from seed. Otherwise (I think) these would be handled the same as Miscanthus + Switchgrass or Sugarcane --- .github/workflows/book.yml | 61 + .github/workflows/ci.yml | 14 - .github/workflows/styler-actions.yml | 79 + .gitignore | 3 - CHANGELOG.md | 14 + DEV-INTRO.md | 346 ++- actions/book.yml | 31 + apps/api/Dockerfile | 30 + apps/api/R/auth.R | 81 + apps/api/R/entrypoint.R | 37 + apps/api/R/general.R | 30 + apps/api/R/models.R | 42 + apps/api/R/runs.R | 105 + apps/api/R/workflows.R | 128 ++ apps/api/README.md | 20 + apps/api/pecanapi-spec.yml | 442 ++++ apps/api/test_pecanapi.sh | 7 + base/db/R/query.dplyr.R | 45 +- base/db/R/query.format.vars.R | 16 +- base/logger/DESCRIPTION | 2 +- base/remote/DESCRIPTION | 2 +- base/settings/R/check.all.settings.R | 2 +- base/utils/R/sensitivity.R | 5 +- base/workflow/inst/batch_run.R | 3 +- base/workflow/inst/permutation_tests.R | 1 - .../01_install_pecan.Rmd | 34 +- .../07_remote_access/01_pecan_api.Rmd | 475 ++++ .../03_topical_pages/09_standalone_tools.Rmd | 2 +- .../03_topical_pages/11_adding_to_pecan.Rmd | 8 +- .../94_docker/02_quickstart.Rmd | 82 +- book_source/Makefile | 7 +- book_source/figures/env-file.PNG | Bin 0 -> 92110 bytes docker-compose.dev.yml | 105 + 
docker-compose.yml | 52 +- docker.sh | 24 +- docker/depends/Dockerfile | 2 +- docker/depends/pecan.depends | 2 +- docker/env.example | 2 +- docker/executor/Dockerfile | 2 +- docker/executor/executor.py | 8 + models/biocro/R/call_biocro.R | 91 +- models/ed/Dockerfile | 6 +- models/ed/R/model2netcdf.ED2.R | 21 +- models/ed/inst/ED2IN.git | 1259 ----------- models/ed/inst/ED2IN.r2.2.0 | 1997 +++++++++++++++++ modules/assim.batch/DESCRIPTION | 2 +- modules/assim.batch/R/hier.mcmc.R | 20 +- modules/assim.batch/R/pda.utils.R | 8 +- .../scripts/benchmark.workflow.FATES_BCI.R | 2 +- modules/data.atmosphere/R/met.process.R | 47 +- modules/data.atmosphere/R/met.process.stage.R | 1 + modules/data.atmosphere/man/browndog.met.Rd | 20 +- .../data.atmosphere/man/db.site.lat.lon.Rd | 9 +- .../data.atmosphere/man/met.process.stage.Rd | 2 +- .../tests/Rcheck_reference.log | 5 +- modules/data.remote/R/call_MODIS.R | 20 +- modules/data.remote/inst/bands2lai_snap.py | 47 + modules/data.remote/inst/bands2ndvi.py | 46 + modules/data.remote/inst/gee2pecan_bands.py | 72 + modules/data.remote/inst/gee2pecan_smap.py | 152 ++ modules/data.remote/inst/remote_process.py | 118 + .../inst/satellitetools/biophys_xarray.py | 235 ++ .../data.remote/inst/satellitetools/gee.py | 716 ++++++ .../inst/satellitetools/test.geojson | 38 + modules/data.remote/man/call_MODIS.Rd | 24 +- modules/emulator/DESCRIPTION | 4 +- modules/emulator/R/minimize.GP.R | 10 +- modules/meta.analysis/R/meta.analysis.R | 9 +- scripts/compile.sh | 3 + shiny/BrownDog/server.R | 10 +- shiny/ViewMet/server.R | 2 +- shiny/dbsync/Dockerfile | 10 +- shiny/dbsync/app.R | 45 +- tests/docker.sipnet.xml | 67 + web/04-runpecan.php | 6 +- web/07-continue.php | 29 +- 76 files changed, 5910 insertions(+), 1594 deletions(-) create mode 100644 .github/workflows/book.yml create mode 100644 .github/workflows/styler-actions.yml create mode 100644 actions/book.yml create mode 100644 apps/api/Dockerfile create mode 100644 apps/api/R/auth.R create 
mode 100644 apps/api/R/entrypoint.R create mode 100644 apps/api/R/general.R create mode 100644 apps/api/R/models.R create mode 100644 apps/api/R/runs.R create mode 100644 apps/api/R/workflows.R create mode 100644 apps/api/README.md create mode 100644 apps/api/pecanapi-spec.yml create mode 100644 apps/api/test_pecanapi.sh create mode 100644 book_source/03_topical_pages/07_remote_access/01_pecan_api.Rmd create mode 100644 book_source/figures/env-file.PNG create mode 100644 docker-compose.dev.yml delete mode 100644 models/ed/inst/ED2IN.git create mode 100644 models/ed/inst/ED2IN.r2.2.0 create mode 100644 modules/data.remote/inst/bands2lai_snap.py create mode 100644 modules/data.remote/inst/bands2ndvi.py create mode 100644 modules/data.remote/inst/gee2pecan_bands.py create mode 100644 modules/data.remote/inst/gee2pecan_smap.py create mode 100644 modules/data.remote/inst/remote_process.py create mode 100644 modules/data.remote/inst/satellitetools/biophys_xarray.py create mode 100755 modules/data.remote/inst/satellitetools/gee.py create mode 100644 modules/data.remote/inst/satellitetools/test.geojson create mode 100755 scripts/compile.sh create mode 100644 tests/docker.sipnet.xml diff --git a/.github/workflows/book.yml b/.github/workflows/book.yml new file mode 100644 index 00000000000..a633707d5f1 --- /dev/null +++ b/.github/workflows/book.yml @@ -0,0 +1,61 @@ +on: + push: + branches: + - master + - develop + - release/* + tags: + - v1 + - v1* + +# render book +name: renderbook + +jobs: + bookdown: + name: Render-Book + runs-on: ubuntu-latest + container: pecan/base:latest + steps: + - uses: actions/checkout@v1 + - uses: r-lib/actions/setup-r@v1 + - uses: r-lib/actions/setup-pandoc@v1 + - name: Install rmarkdown + run: Rscript -e 'install.packages(c("rmarkdown","bookdown"))' + - name: Render Book + run: cd book_source && Rscript -e 'options(bookdown.render.file_scope=FALSE); bookdown::render_book("index.Rmd")' + - uses: actions/upload-artifact@v2 + with: + name: _book + 
path: book_source/_book/ + + + checkout-and-deploy: + runs-on: ubuntu-latest + needs: bookdown + steps: + - name: Download artifact + uses: actions/download-artifact@v2 + with: + # Artifact name + name: _book # optional + # Destination path + path: _book/ # optional + # repo-token: ${{ secrets.GITHUB_TOKEN }} + - name: Checkout documentation repo + uses: actions/checkout@v2 + with: + repository: ${{ github.repository_owner }}/pecan-documentation + path: pecan-documentation + token: ${{ secrets.GH_PAT }} + - run: | + export VERSION=${GITHUB_REF##*/}_test + cd pecan-documentation && mkdir -p $VERSION + git config --global user.email "pecanproj@gmail.com" + git config --global user.name "GitHub Documentation Robot" + rsync -a --delete ../_book/ $VERSION + git add --all * + git commit -m "Build book from pecan revision $GITHUB_SHA" || true + git push -q origin master + + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 518c4aad5de..7965ee45819 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -23,20 +23,6 @@ jobs: runs-on: ubuntu-latest container: pecan/depends:develop steps: - - name: check git version - id: gitversion - run: | - v=$(git --version | grep -oE '[0-9\.]+') - v='cat(numeric_version("'${v}'") < "2.18")' - echo "##[set-output name=isold;]$(Rscript -e "${v}")" - - name: upgrade git if needed - # Hack: actions/checkout wants git >= 2.18, rocker 3.5 images have 2.11 - # Assuming debian stretch because newer images have git >= 2.20 already - if: steps.gitversion.outputs.isold == 'TRUE' - run: | - echo 'deb http://deb.debian.org/debian stretch-backports main' >> /etc/apt/sources.list - apt-get update - apt-get -t stretch-backports upgrade -y git - uses: actions/checkout@v2 - run: mkdir -p "${HOME}${R_LIBS#'~'}" shell: bash diff --git a/.github/workflows/styler-actions.yml b/.github/workflows/styler-actions.yml new file mode 100644 index 00000000000..ff3869db622 --- /dev/null +++ b/.github/workflows/styler-actions.yml 
@@ -0,0 +1,79 @@ +on: + issue_comment: + types: [created] +name: Commands +jobs: + style: + if: startsWith(github.event.comment.body, '/style') + name: style + runs-on: macOS-latest + steps: + - id: file_changes + uses: trilom/file-changes-action@v1.2.3 + - name: testing + run: echo '${{ steps.file_changes.outputs.files_modified}}' + - uses: actions/checkout@v2 + - uses: r-lib/actions/pr-fetch@master + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - uses: r-lib/actions/setup-r@master + - name: Install dependencies + run: | + Rscript -e 'install.packages(c("styler", "devtools"), repos = "cloud.r-project.org")' + Rscript -e 'devtools::install_version("roxygen2", version = "7.0.2", repos = "http://cran.us.r-project.org")' + - name: string operations + shell: bash + run: | + echo '${{ steps.file_changes.outputs.files_modified}}' > names.txt + cat names.txt | tr -d '[]' > changed_files.txt + text=$(cat changed_files.txt) + IFS=',' read -ra ids <<< "$text" + for i in "${ids[@]}"; do if [[ "$i" == *.R\" || "$i" == *.Rmd\" ]]; then echo "$i" >> files_to_style.txt; fi; done + - name: Upload artifacts + uses: actions/upload-artifact@v1 + with: + name: artifacts + path: files_to_style.txt + - name: Style + run: for i in $(cat files_to_style.txt); do Rscript -e "styler::style_file("$i")"; done + - name: commit + run: | + git add \*.R + git add \*.Rmd + if [ "$(git diff --name-only --cached)" != "" ]; then git commit -m 'automated syle update' ; fi + - uses: r-lib/actions/pr-push@master + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + + + check: + needs: [style] + runs-on: ubuntu-latest + container: pecan/depends:develop + steps: + - uses: actions/checkout@v2 + - uses: r-lib/actions/pr-fetch@master + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + - uses: r-lib/actions/setup-r@master + - name : download artifacts + uses: actions/download-artifact@v1 + with: + name: artifacts + - name : make + shell: bash + run: | + cut -d / -f 1-2 artifacts/files_to_style.txt | tr -d 
'"' > changed_dirs.txt + cat changed_dirs.txt + sort changed_dirs.txt | uniq > needs_documenting.txt + cat needs_documenting.txt + for i in $(cat needs_documenting.txt); do make .doc/${i}; done + - name: commit + run: | + git config --global user.email "pecan_bot@example.com" + git config --global user.name "PEcAn stylebot" + git add \*.Rd + if [ "$(git diff --name-only --cached)" != "" ]; then git commit -m 'automated documentation update' ; fi + - uses: r-lib/actions/pr-push@master + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index badc2505dbf..1e523cbe68f 100644 --- a/.gitignore +++ b/.gitignore @@ -99,6 +99,3 @@ contrib/modellauncher/modellauncher # don't checkin renv /renv/ - -# ignore IP mapping to lat/lon (is about 65MB) -shiny/dbsync/IP2LOCATION-LITE-DB5.BIN diff --git a/CHANGELOG.md b/CHANGELOG.md index 19e373161d5..dab05d00119 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ For more information about this file see also [Keep a Changelog](http://keepacha ### Fixed +- Use initial biomass pools for Sorghum and Setaria #2495, #2496 +- PEcAn.DB::betyConnect() is now smarter, and will try to use either config.php or environment variables to create a connection. It has switched to use db.open helper function (#2632). - PEcAn.utils::tranformstats() assumed the statistic names column of its input was a factor. It now accepts character too, and returns the same class given as input (#2545). - fixed and added tests for `get.rh` function in PEcAn.data.atmosphere - Invalid .zenodo.json that broke automatic archiving on Zenodo ([b56ef53](https://github.com/PecanProject/pecan/commit/b56ef53888d73904c893b9e8c8cfaeedd7b1edbe)) @@ -21,8 +23,12 @@ For more information about this file see also [Keep a Changelog](http://keepacha - When building sipnet model would not set correct model version - Update pecan/depends docker image to have latest Roxygen and devtools. 
- Update ED docker build, will now build version 2.2.0 and git +- Do not override meta-analysis settings random-effects = FALSE https://github.com/PecanProject/pecan/pull/2625 +- model2netcdf.ED2 no longer detecting which varibles names `-T-` files have based on ED2 version (#2623) ### Changed + +- Replaced `tmvtnorm` package with `TruncatedNormal` package for speed up per #2621. - Continuous integration changes: Added experimental GitHub Actions CI builds (#2544), streamlined Travis CI builds, added a fourth R version (second-newest old release; currently R 3.5) to Travis test matrix (#2592). - Functions that update database entries no longer pass `created_at` or `updated_at` timestamps. The database now updates these itself and ensures they are consistently in UTC (#1083). - `PEcAn.DB::insert_table` now uses `DBI::dbAppendTable` internally instead of manually constructed SQL (#2552). @@ -34,8 +40,13 @@ For more information about this file see also [Keep a Changelog](http://keepacha - No longer writing an arbitrary num for each PFT, this was breaking ED runs potentially. - The pecan/data container has no longer hardcoded path for postgres - PEcAn.JULES: Removed dependency on `ncdf4.helpers` package, which has been removed from CRAN (#2511). +- data.remote: Arguments to the function `call_MODIS()` have been changed (issue #2519). ### Added + +- Documentation in [DEV-INTRO.md](DEV-INTRO.md) on development in a docker environment (#2553) +- PEcAn API that can be used to talk to PEcAn servers. Endpoints to GET the details about the server that user is talking to, PEcAn models, workflows & runs. Authetication enabled. (#2631) +- New versioned ED2IN template: ED2IN.2.2.0 (#2143) (replaces ED2IN.git) - model_info.json and Dockerfile to template (#2567) - Dockerize BASGRA_N model. - Basic coupling for models BASGRA_N and STICS. 
@@ -47,10 +58,13 @@ For more information about this file see also [Keep a Changelog](http://keepacha - New shiny application to show database synchronization status (shiny/dbsync) ### Removed + +- Removed ED2IN.git (#2599) 'definitely going to break things for people' - but they can still use PEcAn <=1.7.1 - Database maintenance scripts `vacuum.bety.sh` and `reindex.bety.sh` have been moved to the [BeTY database repository](https://github.com/PecanProject/bety) (#2563). - Scripts `dump.pgsql.sh` and `dump.mysql.sh` have been deleted. See the ["BeTY database administration"](https://pecanproject.github.io/pecan-documentation/develop/database.html) chapter of the PEcAn documentation for current recommendations (#2563). - Old dependency management scripts `check.dependencies.sh`, `update.dependencies.sh`, and `install_deps.R` have been deleted. Use `generate_dependencies.R` and the automatic dependency handling built into `make install` instead (#2563). + ## [1.7.1] - 2018-09-12 ### Fixed diff --git a/DEV-INTRO.md b/DEV-INTRO.md index b1881ba44f4..b559c3fc07a 100644 --- a/DEV-INTRO.md +++ b/DEV-INTRO.md @@ -1,59 +1,325 @@ -PEcAn Development -================= +# PEcAn Development -Directory Structure -------------------- +This is a minimal guide to getting started with PEcAn development under Docker. You can find more information about docker in the [pecan documentation](https://pecanproject.github.io/pecan-documentation/master/docker-index.html). -### pecan/ +## Git Repository and Workflow -* modules/ Contains the modules that make up PEcAn -* web/ The PEcAn web app to start a run and view outputs. -* models/ Code to create the model specific configurations. -* documentation/ Documentation about what PEcAn is and how to use it. +We recommend following the the [gitflow](https://nvie.com/posts/a-successful-git-branching-model/) workflow and working in your own [fork of the PEcAn repsitory](https://help.github.com/en/github/getting-started-with-github/fork-a-repo). 
See the [PEcAn developer guide](book_source/02_demos_tutorials_workflows/05_developer_workflows/02_git/01_using-git.Rmd) for further details. In the `/scripts` folder there is a script called [syncgit.sh](scripts/syncgit.sh) that will help with synchronizing your fork with the official repository. -### Modules (R packages) +To clone the PEcAn repository: -* General -** all -** utils -** db -* Analysis -** modules/meta.analysis -** modules/uncertainty" -** modules/data.land -** modules/data.atmosphere -** modules/assim.batch -** modules/assim.sequential -** modules/priors -* Model Interfaces -** models/ed -** models/sipnet -** models/biocro +```sh +git clone git@github.com:pecanproject/pecan +cd pecan +# alternatively, if you haven't set up ssh keys with GitHub +# git clone https://github.com/PecanProject/pecan +``` + +## Developing in Docker + +The use of Docker in PEcAn is described in detail in the [PEcAn documentation](https://pecanproject.github.io/pecan-documentation/master/docker-index.html). This is intended as a quick start. + +### Installing Docker + +To install Docker and docker-compose, see the docker documentation: +- Docker Desktop in [Mac OSX](https://docs.docker.com/docker-for-mac/install/) or [Windows](https://docs.docker.com/docker-for-windows/install/) +- Docker (e.g. [Ubuntu](https://docs.docker.com/compose/install/)) and [docker-compose](https://docs.docker.com/compose/install/) on your linux operating system. + +_Note for Linux users:_ add your user to the docker group. This will prevent you from having to use `sudo` to start the docker containers, and makes sure that any file that is written to a mounted volume is owned by you. This can be done using +```sh +# for linux users +sudo adduser ${USER} docker +``` + +### Deploying PEcAn in Docker + +To get started with development in docker we need to bring up the docker stack first.
In the main pecan folder you will find the [docker-compose.yml](docker-compose.yml) file that can be used to bring up the pecan stack. There is also the [docker-compose.dev.yaml](docker-compose.dev.yaml) file that adds additional containers, and changes some services to make it easier for development. + +By default docker-compose will use the files `docker-compose.yml` and `docker-compose.override.yml`. We will use the default `docker-compose.yml` file from PEcAn. The `docker-compose.override.yml` file can be used to configure it for your specific environment, in our case we will use it to setup the docker environment for development. Copy the `docker-compose.dev.yml` file to `docker-compose.override.yml` to start working with your own override file, i.e. : + +For Linux/MacOSX + +``` +cp docker-compose.dev.yml docker-compose.override.yml +``` + +For Windows + +``` +copy docker-compose.dev.yml docker-compose.override.yml +``` + +You can now use the command `docker-compose` to work with the containers setup for development. **The rest of this document assumes you have done this step.** +### First time setup -#### List of modules +The steps in this section only need to be done the fist time you start working with the stack in docker. After this is done you can skip these steps. You can find more detail about the docker commands in the [pecan documentation](https://pecanproject.github.io/pecan-documentation/master/docker-index.html). -Installing PEcAn ----------------- +* setup .env file +* create folders to hold the data +* load the postgresql database +* load some test data +* copy all R packages (optional but recommended) +* setup for web folder development (optional) -### Virtual Machine +#### .env file -* Fastest way to get started -* see PEcAn demo ... +You can copy the [`docker/env.example`](docker/env.example) file as .env in your pecan folder. 
The variables we want to modify are: -### Installing from source +For Linux/MacOSX -#### From GitHub +```sh +cp docker/env.example ./env +``` + +For Windows + +``` +copy docker/env.example ./env +``` + +* `COMPOSE_PROJECT_NAME` set this to pecan, the prefix for all containers +* `PECAN_VERSION` set this to develop, the docker image we start with + +Both of these variables should also be uncommented by removing the # preceding them. At the end you should see the following if you run the following command `egrep -v '^(#|$)' .env`. If you have a windows system, you will need to set the variable PWD as well, and for linux you will need to set UID and GID (for rstudio). + +For Linux ``` -library(devtools) -install_github("pecan", "PEcAnProject") +echo "COMPOSE_PROJECT_NAME=pecan" >> .env +echo "PECAN_VERSION=develop" >> .env +echo "UID=$(id -u)" >> .env +echo "GID=$(id -g)" >> .env ``` -#### "Makefile" +For MacOSX ``` -./scripts/build.sh -install # installs all R packages -./scripts/build.sh -h # list other options +echo "COMPOSE_PROJECT_NAME=pecan" >> .env +echo "PECAN_VERSION=develop" >> .env +``` + +For Windows: + +``` +echo "COMPOSE_PROJECT_NAME=pecan" >> .env +echo "PECAN_VERSION=develop" >> .env +echo "PWD=%CD%" >> .env +``` + +Once you have setup `docker-compose.override.yml` and the `.env` files, it is time to pull all docker images that will be used. Doing this will make sure you have the latest version of those images on your local system. + +``` +docker-compose pull +``` + +#### folders (optional) + +The goal of the development is to share the development folder with your container, whilst minimizing the latency. What this will do is setup the folders to allow for your pecan folder to be shared, and keep the rest of the folders managed by docker. Some of this is based on a presentation done during [DockerCon 2020](https://docker.events.cube365.net/docker/dockercon/content/Videos/92BAM7vob5uQ2spZf). 
In this talk it is recommended to keep the database on the filesystem managed by docker, as well as any other folders that are not directly modified on the host system (not using the docker managed volumes could lead to a large speed loss when reading/writing to the disk). The `docker-compose.override.yml` can be modified to copy all the data to the local filesystem, you will need to comment out the appropriate blocks. If you are sharing more than the pecan home directory you will need to make sure that these folder exist. As from the video, it is recommended to keep these folders outside of the actual pecan folder to allow for better caching capabilities of the docker system. + +If you have commented out the volumes in `docker-compose.override.yml` you will need to create the folders. Assuming you have not modified the values, you can do this with: + +``` +mkdir -p $HOME/volumes/pecan/{lib,pecan,portainer,postgres,rabbitmq,traefik} +``` + + +The following volumes are specified: + +- **pecan_home** : is the checked out folder of PEcAn. This is shared with the executor and rstudio container allowing you to share and compile PEcAn. (defaults to current folder) +- **pecan_web** : is the checked out web folder of PEcAn. This is shared with the web container allowing you to share and modify the PEcAn web app. (defaults to web folder in the current folder) +- **pecan_lib** : holds all the R packages for the specific version of PEcAn and R. This folder will be shared amongst all other containers, and will contain the compiled PEcAn code. (defaults to managed by docker, or $HOME/volumes/pecan/lib) +- **pecan** this holds all the data, such as workflows and any downloaded data. (defaults to managed by docker, or $HOME/volumes/pecan/pecan) +- **traefik** holds persisent data for the web proxy, that directs incoming traffic to the correct container. (defaults to managed by docker, or $HOME/volumes/pecan/traefik) +- **postgres** holds the actual database data. 
If you want to backup the database, you can stop the postgres container, zip up the folder. (defaults to managed by docker, or $HOME/volumes/pecan/postgres) +- **rabbitmq** holds persistent information of the message broker (rabbitmq). (defaults to managed by docker, or $HOME/volumes/pecan/rabbitmq) +- **portainer** if you enabled the portainer service this folder is used to hold persistent data for this service. You will need to enable this service. (defaults to managed by docker, or $HOME/volumes/pecan/portainer) + +These folders will hold all the persistent data for each of the respective containers and can grow. For example the postgres database is multiple GB. The pecan folder will hold all data produced by the workflows, including any downloaded data, and can grow to many giga bytes. + +#### postgresql database + +First we bring up postgresql (we will start RabbitMQ as well since it takes some time to start): + +``` +docker-compose up -d postgres rabbitmq +``` + +This will start postgresql and rabbitmq. We need to wait for a few minutes (you can look at the logs using `docker-compose logs postgres`) to see if it is ready. + +Once the database has finished starting up we will initialize the database. Now you can load the database using the following commands. The first command will make sure we have the latest version of the image, the second command will actually load the information into the database. + +``` +docker pull pecan/db +docker run --rm --network pecan_pecan pecan/db +``` + + +Once that is done we create two users for BETY, first user is the guest user that you can use to login in the BETY interface. The second user is a user with admin rights. 
+ +``` +docker-compose run --rm bety user guestuser guestuser "Guest User" guestuser@example.com 4 4 +docker-compose run --rm bety user carya illinois "Carya Demo User" carya@example.com 1 1 +``` + +#### load example data + +Once the database is loaded we can add some example data, some of the example runs and runs for the ED model, assume some of this data is available. This can take some time, but all the data needed will be copied to the `/data` folder in the pecan containers. As with the database we first pull the latest version of the image, and then execute the image to copy all the data: + +``` +docker pull pecan/data:develop +docker run -ti --rm --network pecan_pecan --volume pecan_pecan:/data --env FQDN=docker pecan/data:develop +``` + +#### copy R packages (optional but recommended) + +Next copy the R packages from a container to volume `pecan_lib`. This is not really needed, but will speed up the process of the first compilation. Later we will put our newly compiled code here as well. This folder is shared with all PEcAn containers, allowing you to compile the code in one place, and have the compiled code available in all other containers. For example modify the code for a model, allows you to compile the code in rstudio container, and see the results in the model container. + +You can copy all the data using the following command. This will copy all compiled packages to your local machine. + +``` +docker run -ti --rm -v pecan_lib:/rlib pecan/base:develop cp -a /usr/local/lib/R/site-library/. /rlib/ +``` + +#### copy web config file (optional) + +The `docker-compose.override.yml` file has a section that will enable editing the web application. This is by default commented out. If you want to uncoment it you will need to first copy the config.php from the docker/web folder. 
You can do this using + +For Linux/MacOSX -``` \ No newline at end of file +``` +cp docker/web/config.docker.php web/config.php +``` + +For Windows + +``` +copy docker\web\config.docker.php web\config.php +``` + + + +### PEcAn Development + +To begin development we first have to bring up the full PEcAn stack. This assumes you have done the steps above once. You don't need to stop any running containers, you can use the following command to start all containers. At this point you have PEcAn running in docker. + +``` +docker-compose up -d +``` + +The current folder (most likely your clone of the git repository) is mounted in some containers as `/pecan`, and in the case of rstudio also in your home folder as `pecan`. You can see which containers exactly in `docker-compose.override.yml`. + +You can now modify the code on your local machine, or you can use [rstudio](http://localhost:8000) in the docker stack. Once you made changes to the code you can compile the code either in the terminal of rstudio (`cd pecan && make`) or using `./scripts/compile.sh` from your machine (the latter is nothing more than a shell script that runs `docker-compose exec executor sh -c 'cd /pecan && make'`). + +The compiled code is written to `/usr/local/lib/R/site-library` which is mapped to `volumes/lib` on your machine. This same folder is mounted in many other containers, allowing you to share the same PEcAn modules in all containers. Now if you change a module and compile it, all other containers will see and use this new version of your module. + +To compile the PEcAn code you can use the make command in either the rstudio container, or in the executor container. The script [`compile.sh`](scripts/compile.sh) will run make inside the executor container. + +### Workflow Submission + +You can submit your workflow either in the executor container or in rstudio container.
For example to run the `docker.sipnet.xml` workflow located in the tests folder you can use: + +``` +docker-compose exec executor bash +# inside the container +cd /pecan/tests +R CMD ../web/workflow.R docker.sipnet.xml +``` + +A better way of doing this is developed as part of GSOC, in which case you can leverage of the restful interface defined, or using the new R PEcAn API package. + +# Directory Structure + +Following are the main folders inside the pecan repository. + +### base (R packages) + +These are the core packages of PEcAn. Most other packages will depend on the packages in this folder. + +### models (R packages) + +Each subfolder contains the required pieces to run the model in PEcAn + +### modules (R packages) + +Contains packages that either do analysis, or download and convert different data products. + +### web (PHP + javascript) + +The Pecan web application + +### shiny (R + shiny) + +Each subfolder is its own shiny application. + +### book_source (RMarkdown) + +The PEcAn documentation that is compiled and uploaded to the PEcAn webpage. + +### docker + +Some of the docker build files. The Dockerfiles for each model are placed in the models folder. + +### scripts + +Small scripts that are used as part of the development and installation of PEcAn. + +# Advanced Development Options + +## Reset all containers/database + +If you want to start from scratch and remove all old data, but keep your pecan checked out folder, you can remove the folders where you have written the data (see `folders` below). You will also need to remove any of the docker managed volumes. To see all volumes you can do `docker volume ls -q -f name=pecan`. If you are sure, you can either remove them one by one, or remove them all at once using the command below. **THIS DESTROYS ALL DATA IN DOCKER MANAGED VOLUMES.**. 
+ +``` +docker volume rm $(docker volume ls -q -f name=pecan) +``` + +If you changed the docker-compose.override.yml file to point to a location on disk for some of the containers (instead of having them managed by docker) you will need to actually delete the data on your local disk, docker will NOT do this. + +## Reset the lib folder + +If you want to reset the pecan lib folder that is mounted across all machines, for example when there is a new version of PEcAn or a new version of R, you will need to delete the volume pecan_lib, and repopulate it. To delete the volume use the following command, and then look at "copy R packages" to copy the data again. + +``` +docker-compose down +docker volume rm pecan_lib +``` + +## Linux and User permissions + +(On Mac OSX and Windows files should automatically be owned by the user running the docker-compose commands). + +If you use mounted folders, make sure that these folders are writable by the containers. Docker on Linux will try to preserve the file permissions. To do this it might be necessary for the folders to have rw permissions. This can be done by using `chmod 777 $HOME/volumes/pecan/{lib,pecan,portainer,postgres,rabbitmq,traefik}`. + +Alternatively, you can leverage NFS to mount the file system in your local docker image, changing the files to be owned by the user specified in the export file. Try to limit this to only your PEcAn folder since this will allow anybody on this system to get access to the exported folder as you! + +First install nfs server: + +``` +apt-get install nfs-kernel-server +``` + +Next export your home directory: + +``` +echo -e "$PWD\t127.0.0.1(rw,no_subtree_check,all_squash,anonuid=$(id -u),anongid=$(id -g))" | sudo tee -a /etc/exports +``` + +And export the filesystem. + +``` +sudo exportfs -va +``` + +At this point you have exported your home directory, only to your local machine. All files written to that exported filesystem will be owned by you (`id -u`) and your primary group (`id -g`).
+ +Finally we can modify the `docker-compose.override.yml` file to allow for writing files to your PEcAn folder as you: + +``` +volumes: + pecan_home: + driver_opts: + type: "nfs" + device: ":${PWD}" + o: "addr=127.0.0.1" +``` diff --git a/actions/book.yml b/actions/book.yml new file mode 100644 index 00000000000..933b3174c34 --- /dev/null +++ b/actions/book.yml @@ -0,0 +1,31 @@ +# This is a basic workflow to help you get started with Actions + +name: CI + +on: + push: + branches: master + pull_request: + branches: master + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: ubuntu-latest + container: pecan/depends:develop + + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it + - uses: actions/checkout@v2 + + - name: Building book from source + run: cd book_source && make + + # Runs a set of commands using the runners shell + - name: Run a multi-line script + run: | + echo Add other actions to build, + echo test, and deploy your project. 
+ diff --git a/apps/api/Dockerfile b/apps/api/Dockerfile new file mode 100644 index 00000000000..abec70cff90 --- /dev/null +++ b/apps/api/Dockerfile @@ -0,0 +1,30 @@ +# this needs to be at the top, what version are we building +ARG IMAGE_VERSION="latest" + + +# -------------------------------------------------------------------------- +# PECAN FOR MODEL BASE IMAGE +# -------------------------------------------------------------------------- +FROM pecan/base:${IMAGE_VERSION} +LABEL maintainer="Tezan Sahu " + + +COPY ./ /api + +WORKDIR /api/R + +# -------------------------------------------------------------------------- +# Variables to store in docker image (most of them come from the base image) +# -------------------------------------------------------------------------- +ENV AUTH_REQ="yes" \ + HOST_ONLY="no" \ + PGHOST="postgres" + +# COMMAND TO RUN +RUN apt-get update +RUN apt-get install libsodium-dev -y +RUN Rscript -e "devtools::install_version('promises', '1.1.0', repos = 'http://cran.rstudio.com')" \ + && Rscript -e "devtools::install_version('webutils', '1.1', repos = 'http://cran.rstudio.com')" \ + && Rscript -e "devtools::install_github('rstudio/swagger')" \ + && Rscript -e "devtools::install_github('rstudio/plumber')" +CMD Rscript entrypoint.R \ No newline at end of file diff --git a/apps/api/R/auth.R b/apps/api/R/auth.R new file mode 100644 index 00000000000..6cf3114a7e3 --- /dev/null +++ b/apps/api/R/auth.R @@ -0,0 +1,81 @@ +library(dplyr) + +#* Obtain the encrypted password for a user +#* @param username Username, which is also the 'salt' +#* @param password Unencrypted password +#* @param secretkey Secret Key, which if null, is set to 'notasecret' +#* @return Encrypted password +#* @author Tezan Sahu +get_crypt_pass <- function(username, password, secretkey = NULL) { + secretkey <- if(is.null(secretkey)) "notasecret" else secretkey + dig <- secretkey + salt <- username + for (i in 1:10) { + dig <- digest::digest( + paste(dig, salt, password, 
secretkey, sep="--"), + algo="sha1", + serialize=FALSE + ) + } + return(dig) +} + + + +#* Check if the encrypted password for the user is valid +#* @param username Username +#* @param crypt_pass Encrypted password +#* @return TRUE if encrypted password is correct, else FALSE +#* @author Tezan Sahu +validate_crypt_pass <- function(username, crypt_pass) { + + dbcon <- PEcAn.DB::betyConnect() + + res <- tbl(dbcon, "users") %>% + filter(login == username, + crypted_password == crypt_pass) %>% + count() %>% + collect() + + PEcAn.DB::db.close(dbcon) + + if (res == 1) { + return(TRUE) + } + + return(FALSE) +} + +#* Filter to authenticate a user calling the PEcAn API +#* @param req The request +#* @param res The response to be set +#* @return Appropriate response +#* @author Tezan Sahu +authenticate_user <- function(req, res) { + # If the API endpoint that do not require authentication + if ( + grepl("swagger", req$PATH_INFO, ignore.case = TRUE) || + grepl("openapi.json", req$PATH_INFO, fixed = TRUE) || + grepl("ping", req$PATH_INFO, ignore.case = TRUE) || + grepl("status", req$PATH_INFO, ignore.case = TRUE)) + { + return(plumber::forward()) + } + + if (!is.null(req$HTTP_AUTHORIZATION)) { + # HTTP_AUTHORIZATION is of the form "Basic ", + # where the is contains : + auth_details <- strsplit(rawToChar(jsonlite::base64_dec(strsplit(req$HTTP_AUTHORIZATION, " +")[[1]][2])), ":")[[1]] + username <- auth_details[1] + password <- auth_details[2] + crypt_pass <- get_crypt_pass(username, password) + + if(validate_crypt_pass(username, crypt_pass)){ + return(plumber::forward()) + } + + } + + res$status <- 401 # Unauthorized + return(list(error="Authentication required")) +} \ No newline at end of file diff --git a/apps/api/R/entrypoint.R b/apps/api/R/entrypoint.R new file mode 100644 index 00000000000..fc3ed32d604 --- /dev/null +++ b/apps/api/R/entrypoint.R @@ -0,0 +1,37 @@ +#* This is the entry point to the PEcAn API. 
+#* All API endpoints (& filters) are mounted here +#* @author Tezan Sahu + +source("auth.R") +source("general.R") + +root <- plumber::plumber$new() +root$setSerializer(plumber::serializer_unboxed_json()) + +# Filter for authenticating users trying to hit the API endpoints +root$filter("require-auth", authenticate_user) + +# The /api/ping & /api/status are standalone API endpoints +# implemented using handle() because of restrictions of plumber +# to mount multiple endpoints on the same path (or subpath) +root$handle("GET", "/api/ping", ping) +root$handle("GET", "/api/status", status) + +# The endpoints mounted here are related to details of PEcAn models +models_pr <- plumber::plumber$new("models.R") +root$mount("/api/models", models_pr) + +# The endpoints mounted here are related to details of PEcAn workflows +workflows_pr <- plumber::plumber$new("workflows.R") +root$mount("/api/workflows", workflows_pr) + +# The endpoints mounted here are related to details of PEcAn runs +runs_pr <- plumber::plumber$new("runs.R") +root$mount("/api/runs", runs_pr) + +# The API server is bound to 0.0.0.0 on port 8000 +# The Swagger UI for the API draws its source from the pecanapi-spec.yml file +root$run(host="0.0.0.0", port=8000, debug=TRUE, swagger = function(pr, spec, ...) 
{ + spec <- yaml::read_yaml("../pecanapi-spec.yml") + spec +}) \ No newline at end of file diff --git a/apps/api/R/general.R b/apps/api/R/general.R new file mode 100644 index 00000000000..5b8fcc7fd67 --- /dev/null +++ b/apps/api/R/general.R @@ -0,0 +1,30 @@ +#* Function to be executed when /api/ping endpoint is called +#* If successful connection to API server is established, this function will return the "pong" message +#* @return Mapping containing response as "pong" +#* @author Tezan Sahu +ping <- function(req){ + res <- list(request="ping", response="pong") + res +} + +#* Function to get the status & basic information about the Database Host +#* @return Details about the database host +#* @author Tezan Sahu +status <- function() { + + ## helper function to obtain environment variables + get_env_var = function (item, default = "unknown") { + value = Sys.getenv(item) + if (value == "") default else value + } + + dbcon <- PEcAn.DB::betyConnect() + res <- list(host_details = PEcAn.DB::dbHostInfo(dbcon)) + + res$pecan_details <- list( + version = get_env_var("PECAN_VERSION"), + branch = get_env_var("PECAN_GIT_BRANCH"), + gitsha1 = get_env_var("PECAN_GIT_CHECKSUM") + ) + return(res) +} \ No newline at end of file diff --git a/apps/api/R/models.R b/apps/api/R/models.R new file mode 100644 index 00000000000..ef8da6582cd --- /dev/null +++ b/apps/api/R/models.R @@ -0,0 +1,42 @@ +library(dplyr) + +#' Retrieve the details of a particular version of a model +#' @param name Model name (character) +#' @param revision Model version/revision (character) +#' @return Model details +#' @author Tezan Sahu +#* @get / +getModels <- function(model_name="all", revision="all", res){ + + dbcon <- PEcAn.DB::betyConnect() + + Models <- tbl(dbcon, "models") %>% + select(model_id = id, model_name, revision, modeltype_id) + + if (model_name != "all"){ + Models <- Models %>% + filter(model_name == !!model_name) + } + + if (revision != "all"){ + Models <- Models %>% + filter(revision == 
!!revision) + } + + Models <- tbl(dbcon, "modeltypes") %>% + select(modeltype_id = id, model_type = name) %>% + inner_join(Models, by = "modeltype_id") %>% + arrange(model_id) + + qry_res <- Models %>% collect() + + PEcAn.DB::db.close(dbcon) + + if (nrow(qry_res) == 0) { + res$status <- 404 + return(list(error="Model(s) not found")) + } + else { + return(list(models=qry_res)) + } +} \ No newline at end of file diff --git a/apps/api/R/runs.R b/apps/api/R/runs.R new file mode 100644 index 00000000000..c3daab5fa53 --- /dev/null +++ b/apps/api/R/runs.R @@ -0,0 +1,105 @@ +#' Get the list of runs (belonging to a particuar workflow) +#' @param workflow_id Workflow id (character) +#' @param offset +#' @param limit +#' @return List of runs (belonging to a particuar workflow) +#' @author Tezan Sahu +#* @get / +getWorkflows <- function(req, workflow_id, offset=0, limit=50, res){ + if (! limit %in% c(10, 20, 50, 100, 500)) { + res$status <- 400 + return(list(error = "Invalid value for parameter")) + } + + dbcon <- PEcAn.DB::betyConnect() + + Runs <- tbl(dbcon, "runs") %>% + select(id, model_id, site_id, parameter_list, ensemble_id, start_time, finish_time) + + Runs <- tbl(dbcon, "ensembles") %>% + select(runtype, ensemble_id=id, workflow_id) %>% + full_join(Runs, by="ensemble_id") %>% + filter(workflow_id == !!workflow_id) + + qry_res <- Runs %>% + arrange(id) %>% + collect() + + PEcAn.DB::db.close(dbcon) + + if (nrow(qry_res) == 0 || as.numeric(offset) >= nrow(qry_res)) { + res$status <- 404 + return(list(error="Run(s) not found")) + } + else { + has_next <- FALSE + has_prev <- FALSE + if (nrow(qry_res) > (as.numeric(offset) + as.numeric(limit))) { + has_next <- TRUE + } + if (as.numeric(offset) != 0) { + has_prev <- TRUE + } + qry_res <- qry_res[(as.numeric(offset) + 1):min((as.numeric(offset) + as.numeric(limit)), nrow(qry_res)), ] + result <- list(runs = qry_res) + result$count <- nrow(qry_res) + if(has_next){ + result$next_page <- paste0( + req$rook.url_scheme, "://", + 
req$HTTP_HOST, + "/api/workflows", + req$PATH_INFO, + substr(req$QUERY_STRING, 0, stringr::str_locate(req$QUERY_STRING, "offset=")[[2]]), + (as.numeric(limit) + as.numeric(offset)), + "&limit=", + limit + ) + } + if(has_prev) { + result$prev_page <- paste0( + req$rook.url_scheme, "://", + req$HTTP_HOST, + "/api/workflows", + req$PATH_INFO, + substr(req$QUERY_STRING, 0, stringr::str_locate(req$QUERY_STRING, "offset=")[[2]]), + max(0, (as.numeric(offset) - as.numeric(limit))), + "&limit=", + limit + ) + } + + return(result) + } +} + +################################################################################################# + +#' Get the of the run specified by the id +#' @param id Run id (character) +#' @return Details of requested run +#' @author Tezan Sahu +#* @get / +getWorkflowDetails <- function(id, res){ + + dbcon <- PEcAn.DB::betyConnect() + + Runs <- tbl(dbcon, "runs") %>% + select(-outdir, -outprefix, -setting) + + Runs <- tbl(dbcon, "ensembles") %>% + select(runtype, ensemble_id=id, workflow_id) %>% + full_join(Runs, by="ensemble_id") %>% + filter(id == !!id) + + qry_res <- Runs %>% collect() + + PEcAn.DB::db.close(dbcon) + + if (nrow(qry_res) == 0) { + res$status <- 404 + return(list(error="Run with specified ID was not found")) + } + else { + return(qry_res) + } +} \ No newline at end of file diff --git a/apps/api/R/workflows.R b/apps/api/R/workflows.R new file mode 100644 index 00000000000..f6007a342e7 --- /dev/null +++ b/apps/api/R/workflows.R @@ -0,0 +1,128 @@ +library(dplyr) + +#' Get the list of workflows (using a particular model & site, if specified) +#' @param model_id Model id (character) +#' @param site_id Site id (character) +#' @param offset +#' @param limit +#' @return List of workflows (using a particular model & site, if specified) +#' @author Tezan Sahu +#* @get / +getWorkflows <- function(req, model_id=NULL, site_id=NULL, offset=0, limit=50, res){ + if (! 
limit %in% c(10, 20, 50, 100, 500)) { + res$status <- 400 + return(list(error = "Invalid value for parameter")) + } + + dbcon <- PEcAn.DB::betyConnect() + + Workflow <- tbl(dbcon, "workflows") %>% + select(id, model_id, site_id) + + Workflow <- tbl(dbcon, "attributes") %>% + select(id = container_id, properties = value) %>% + full_join(Workflow, by = "id") + + if (!is.null(model_id)) { + Workflow <- Workflow %>% + filter(model_id == !!model_id) + } + + if (!is.null(site_id)) { + Workflow <- Workflow %>% + filter(site_id == !!site_id) + } + + qry_res <- Workflow %>% + select(-model_id, -site_id) %>% + arrange(id) %>% + collect() + + PEcAn.DB::db.close(dbcon) + + if (nrow(qry_res) == 0 || as.numeric(offset) >= nrow(qry_res)) { + res$status <- 404 + return(list(error="Workflows not found")) + } + else { + has_next <- FALSE + has_prev <- FALSE + if (nrow(qry_res) > (as.numeric(offset) + as.numeric(limit))) { + has_next <- TRUE + } + if (as.numeric(offset) != 0) { + has_prev <- TRUE + } + + qry_res <- qry_res[(as.numeric(offset) + 1):min((as.numeric(offset) + as.numeric(limit)), nrow(qry_res)), ] + + qry_res$properties[is.na(qry_res$properties)] = "{}" + qry_res$properties <- purrr::map(qry_res$properties, jsonlite::parse_json) + result <- list(workflows = qry_res) + result$count <- nrow(qry_res) + if(has_next){ + result$next_page <- paste0( + req$rook.url_scheme, "://", + req$HTTP_HOST, + "/api/workflows", + req$PATH_INFO, + substr(req$QUERY_STRING, 0, stringr::str_locate(req$QUERY_STRING, "offset=")[[2]]), + (as.numeric(limit) + as.numeric(offset)), + "&limit=", + limit + ) + } + if(has_prev) { + result$prev_page <- paste0( + req$rook.url_scheme, "://", + req$HTTP_HOST, + "/api/workflows", + req$PATH_INFO, + substr(req$QUERY_STRING, 0, stringr::str_locate(req$QUERY_STRING, "offset=")[[2]]), + max(0, (as.numeric(offset) - as.numeric(limit))), + "&limit=", + limit + ) + } + + return(result) + } +} + 
+################################################################################################# + +#' Get the of the workflow specified by the id +#' @param id Workflow id (character) +#' @return Details of requested workflow +#' @author Tezan Sahu +#* @get / +getWorkflowDetails <- function(id, res){ + dbcon <- PEcAn.DB::betyConnect() + + Workflow <- tbl(dbcon, "workflows") %>% + select(id, model_id, site_id) + + Workflow <- tbl(dbcon, "attributes") %>% + select(id = container_id, properties = value) %>% + full_join(Workflow, by = "id") %>% + filter(id == !!id) + + qry_res <- Workflow %>% collect() + + PEcAn.DB::db.close(dbcon) + + if (nrow(qry_res) == 0) { + res$status <- 404 + return(list(error="Workflow with specified ID was not found")) + } + else { + if(is.na(qry_res$properties)){ + res <- list(id = id, properties = list(modelid = qry_res$model_id, siteid = qry_res$site_id)) + } + else{ + res <- list(id = id, properties = jsonlite::parse_json(qry_res$properties[[1]])) + } + + return(res) + } +} \ No newline at end of file diff --git a/apps/api/README.md b/apps/api/README.md new file mode 100644 index 00000000000..69ae57643fc --- /dev/null +++ b/apps/api/README.md @@ -0,0 +1,20 @@ +# PEcAn RESTful API Server + +This folder contains the code & tests for PEcAn's RESTful API server. The API allows users to remotely interact with the PEcAn servers and leverage the functionalities provided by the PEcAn Project. It has been designed to follow common RESTful API conventions. Most operations are performed using the HTTP methods: `GET` (retrieve) & `POST` (create). + +#### For the most up-to-date documentation, you can visit the [PEcAn API Documentation](https://petstore.swagger.io/?url=https://raw.githubusercontent.com/tezansahu/pecan/api_1/apps/api/pecanapi-spec.yml). 
+ +## Starting the PEcAn server: + +Follow these steps to spin up the PEcAn API server locally: + +```bash +$ cd R +$ Rscript entrypoint.R +``` + +## Running the tests: + +```bash +$ ./test_pecanapi.sh +``` diff --git a/apps/api/pecanapi-spec.yml b/apps/api/pecanapi-spec.yml new file mode 100644 index 00000000000..a5d0ffc73e8 --- /dev/null +++ b/apps/api/pecanapi-spec.yml @@ -0,0 +1,442 @@ +openapi: 3.0.0 +servers: + - description: PEcAn Tezan VM + url: https://pecan-tezan-rstudio.ncsa.illinois.edu/p/912db446 + - description: Localhost + url: http://127.0.0.1:8000 + +info: + title: PEcAn Project API + description: >- + This is the API for interacting with server(s) of the __PEcAn Project__. The Predictive Ecosystem Analyser (PEcAn) Project is an open source framework initiated to meet the demands for more accessible, transparent & repeatable modeling of ecosystems. Here's the link to [PEcAn's Github Repository](https://github.com/PecanProject/pecan). + PEcAn can be considered as an ecoinformatics toolbox combined with a set of workflows that wrap around ecosystem models that allow users to effectively perform data synthesis, propagation of uncertainty through a model & ecological predictions in an integrated fashion using a diverse repository of data & models. 
+ version: "1.0.0" + contact: + email: "pecanproj@gmail.com" + license: + name: University of Illinois/NCSA Open Source License + url: https://opensource.org/licenses/NCSA +externalDocs: + description: Find out more about PEcAn Project + url: https://pecanproject.github.io/ + +tags: + - name: general + description: Related to the overall working on the API, details of PEcAn & the server + - name: workflows + description: Everything about PEcAn workflows + - name: runs + description: Everything about PEcAn runs + - name: models + description: Everything about PEcAn models + +##################################################################################################################### +##################################################### API Endpoints ################################################# +##################################################################################################################### +security: + - basicAuth: [] + +paths: + + /api/ping: + get: + summary: Ping the server to check if it is live + tags: + - general + - + responses: + '200': + description: OK + content: + application/json: + schema: + type: object + properties: + req: + type: string + resp: + type: string + '403': + description: Access forbidden + '404': + description: Models not found + + /api/status: + get: + summary: Obtain general information about PEcAn & the details of the database host + tags: + - general + - + responses: + '200': + description: OK + content: + application/json: + schema: + type: object + properties: + pecan_details: + type: object + properties: + version: + type: string + branch: + type: string + gitsha1: + type: string + host_details: + type: object + properties: + hostid: + type: string + hostname: + type: string + start: + type: string + end: + type: string + sync_url: + type: string + sync_contact: + type: string + + '403': + description: Access forbidden + '404': + description: Models not found + + /api/models/: + get: + tags: + - 
models + - + summary: Details of model(s) + parameters: + - in: query + name: model_name + description: Name of the model + required: false + schema: + type: string + - in: query + name: revision + description: Model version/revision + required: false + schema: + type: string + responses: + '200': + description: Available Models + content: + application/json: + schema: + type: object + properties: + models: + type: array + items: + type: object + $ref: '#/components/schemas/Model' + '401': + description: Authentication required + '403': + description: Access forbidden + '404': + description: Model(s) not found + + + /api/workflows/: + get: + tags: + - workflows + - + summary: Get the list of workflows + parameters: + - in: query + name: model_id + description: If provided, returns all workflows that use the provided model_id + required: false + schema: + type: string + - in: query + name: site_id + description: If provided, returns all workflows that use the provided site_id + required: false + schema: + type: string + - in: query + name: offset + description: The number of workflows to skip before starting to collect the result set. + schema: + type: integer + minimum: 0 + default: 0 + required: false + - in: query + name: limit + description: The number of workflows to return. 
+ schema: + type: integer + default: 50 + enum: + - 10 + - 20 + - 50 + - 100 + - 500 + required: false + responses: + '200': + description: List of workflows + content: + application/json: + schema: + type: object + properties: + workflows: + type: array + items: + type: object + $ref: '#/components/schemas/Workflow' + count: + type: integer + next_page: + type: string + prev_page: + type: string + + '401': + description: Authentication required + '403': + description: Access forbidden + '404': + description: Workflows not found + + post: + tags: + - workflows + - + summary: Submit a new PEcAn workflow + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/Workflow' + responses: + '201': + description: Submitted workflow successfully + '401': + description: Authentication required + + + /api/workflows/{id}: + get: + tags: + - workflows + - + summary: Get the details of a PEcAn Workflow + parameters: + - in: path + name: id + description: ID of the PEcAn Workflow + required: true + schema: + type: string + responses: + '200': + description: Details of the requested PEcAn Workflow + content: + application/json: + schema: + $ref: '#/components/schemas/Workflow' + '401': + description: Authentication required + '403': + description: Access forbidden + '404': + description: Workflow with specified ID was not found + + + + + + /api/runs/: + get: + tags: + - runs + - + summary: Get the list of all runs for a specified PEcAn Workflow + parameters: + - in: query + name: workflow_id + description: ID of the PEcAn Workflow + required: true + schema: + type: string + - in: query + name: offset + description: The number of workflows to skip before starting to collect the result set. + schema: + type: integer + minimum: 0 + default: 0 + required: false + - in: query + name: limit + description: The number of workflows to return. 
+ schema: + type: integer + default: 50 + enum: + - 10 + - 20 + - 50 + - 100 + - 500 + required: false + responses: + '200': + description: List of all runs for the requested PEcAn Workflow + content: + application/json: + schema: + type: object + properties: + runs: + type: array + items: + type: object + $ref: '#/components/schemas/Run' + count: + type: integer + next_page: + type: string + prev_page: + type: string + '401': + description: Authentication required + '403': + description: Access forbidden + '404': + description: Workflow with specified ID was not found + + + /api/runs/{id}: + get: + tags: + - runs + - + summary: Get the details of a specified PEcAn run + parameters: + - in: path + name: id + description: ID of the PEcAn run + required: true + schema: + type: string + responses: + '200': + description: Details about the requested run within the requested PEcAn workflow + content: + application/json: + schema: + $ref: '#/components/schemas/Run' + '401': + description: Authentication required + '403': + description: Access forbidden + '404': + description: Workflow with specified ID was not found + + +##################################################################################################################### +###################################################### Components ################################################### +##################################################################################################################### + +components: + schemas: + Model: + properties: + model_id: + type: string + model_name: + type: string + revision: + type: string + modeltype_id: + type: string + model_type: + type: string + + Run: + properties: + id: + type: string + workflow_id: + type: string + runtype: + type: string + ensemble_id: + type: string + model_id: + type: string + site_id: + type: string + parameter_list: + type: string + start_time: + type: string + finish_time: + type: string + + Workflow: + properties: + id: + type: 
string + "properties": + type: object + properties: + pfts: + type: array + items: + type: string + input_met: + type: string + modelid: + type: string + siteid: + type: string + sitename: + type: string + sitegroupid: + type: string + start: + type: string + end: + type: string + variables: + type: string + sensitivity: + type: string + email: + type: string + notes: + type: string + runs: + type: string + pecan_edit: + type: string + status: + type: string + fluxusername: + type: string + input_poolinitcond: + type: string + securitySchemes: + basicAuth: + type: http + scheme: basic + + + diff --git a/apps/api/test_pecanapi.sh b/apps/api/test_pecanapi.sh new file mode 100644 index 00000000000..35543046251 --- /dev/null +++ b/apps/api/test_pecanapi.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +R R/entrypoint.R & +PID=$! + +R test/alltests.R +kill $PID \ No newline at end of file diff --git a/base/db/R/query.dplyr.R b/base/db/R/query.dplyr.R index 4882ed53c0a..b08751f1337 100644 --- a/base/db/R/query.dplyr.R +++ b/base/db/R/query.dplyr.R @@ -4,19 +4,32 @@ #' betyConnect <- function(php.config = "../../web/config.php") { ## Read PHP config file for webserver + if (file.exists(php.config)) { + php_params <- PEcAn.utils::read_web_config(php.config) + } else { + php_params <- list() + } + + ## helper function + getphp = function (item, default = "") { + value = php_params[[item]] + if (is.null(value)) default else value + } - config.list <- PEcAn.utils::read_web_config(php.config) + ## fill in all data from environment variables + dbparams <- get_postgres_envvars(host = getphp("db_bety_hostname", "localhost"), + port = getphp("db_bety_port", "5432"), + dbname = getphp("db_bety_database", "bety"), + user = getphp("db_bety_username", "bety"), + password = getphp("db_bety_password", "bety")) + + ## force driver to be postgres (only value supported by db.open) + dbparams[["driver"]] <- "Postgres" ## Database connection - # TODO: The latest version of dplyr/dbplyr works with standard 
DBI-based - # objects, so we should replace this with a standard `db.open` call. - dplyr::src_postgres(dbname = config.list$db_bety_database, - host = config.list$db_bety_hostname, - user = config.list$db_bety_username, - password = config.list$db_bety_password) + db.open(dbparams) } # betyConnect - #' Convert number to scientific notation pretty expression #' @param l Number to convert to scientific notation #' @export @@ -59,22 +72,28 @@ ncdays2date <- function(time, unit) { #' @export dbHostInfo <- function(bety) { # get host id - result <- db.query(query = "select cast(floor(nextval('users_id_seq') / 1e9) as bigint);", con = bety$con) + result <- db.query(query = "select cast(floor(nextval('users_id_seq') / 1e9) as bigint);", con = bety) hostid <- result[["floor"]] # get machine start and end based on hostid machine <- dplyr::tbl(bety, "machines") %>% - dplyr::filter(sync_host_id == !!hostid) %>% - dplyr::select(sync_start, sync_end) + dplyr::filter(sync_host_id == !!hostid) + if (is.na(nrow(machine)) || nrow(machine) == 0) { return(list(hostid = hostid, + hostname = "", start = 1e+09 * hostid, - end = 1e+09 * (hostid + 1) - 1)) + end = 1e+09 * (hostid + 1) - 1, + sync_url = "", + sync_contact = "")) } else { return(list(hostid = hostid, + hostname = machine$hostname, start = machine$sync_start, - end = machine$sync_end)) + end = machine$sync_end, + sync_url = machine$sync_url, + sync_contact = machine$sync_contact)) } } # dbHostInfo diff --git a/base/db/R/query.format.vars.R b/base/db/R/query.format.vars.R index 79f88eba8d8..61ba7cf3e0e 100644 --- a/base/db/R/query.format.vars.R +++ b/base/db/R/query.format.vars.R @@ -13,8 +13,6 @@ query.format.vars <- function(bety, input.id=NA, format.id=NA, var.ids=NA) { PEcAn.logger::logger.error("Must specify input id or format id") } - con <- bety$con - # get input info either form input.id or format.id, depending which is provided # defaults to format.id if both provided # also query site information (id/lat/lon) if an 
input.id @@ -27,9 +25,9 @@ query.format.vars <- function(bety, input.id=NA, format.id=NA, var.ids=NA) { if (is.na(format.id)) { f <- PEcAn.DB::db.query( query = paste("SELECT * from formats as f join inputs as i on f.id = i.format_id where i.id = ", input.id), - con = con + con = bety ) - site.id <- PEcAn.DB::db.query(query = paste("SELECT site_id from inputs where id =", input.id), con = con) + site.id <- PEcAn.DB::db.query(query = paste("SELECT site_id from inputs where id =", input.id), con = bety) if (is.data.frame(site.id) && nrow(site.id)>0) { site.id <- site.id$site_id site.info <- @@ -38,17 +36,17 @@ query.format.vars <- function(bety, input.id=NA, format.id=NA, var.ids=NA) { "SELECT id, time_zone, ST_X(ST_CENTROID(geometry)) AS lon, ST_Y(ST_CENTROID(geometry)) AS lat FROM sites WHERE id =", site.id ), - con = con + con = bety ) site.lat <- site.info$lat site.lon <- site.info$lon site.time_zone <- site.info$time_zone } } else { - f <- PEcAn.DB::db.query(query = paste("SELECT * from formats where id = ", format.id), con = con) + f <- PEcAn.DB::db.query(query = paste("SELECT * from formats where id = ", format.id), con = bety) } - mimetype <- PEcAn.DB::db.query(query = paste("SELECT * from mimetypes where id = ", f$mimetype_id), con = con)[["type_string"]] + mimetype <- PEcAn.DB::db.query(query = paste("SELECT * from mimetypes where id = ", f$mimetype_id), con = bety)[["type_string"]] f$mimetype <- utils::tail(unlist(strsplit(mimetype, "/")),1) # get variable names and units of input data @@ -56,7 +54,7 @@ query.format.vars <- function(bety, input.id=NA, format.id=NA, var.ids=NA) { query = paste( "SELECT variable_id,name,unit,storage_type,column_number from formats_variables where format_id = ", f$id ), - con = con + con = bety ) if(all(!is.na(var.ids))){ @@ -84,7 +82,7 @@ query.format.vars <- function(bety, input.id=NA, format.id=NA, var.ids=NA) { vars_bety[i, (ncol(vars_bety) - 1):ncol(vars_bety)] <- as.matrix(PEcAn.DB::db.query( query = paste("SELECT name, 
units from variables where id = ", fv$variable_id[i]), - con = con + con = bety )) } diff --git a/base/logger/DESCRIPTION b/base/logger/DESCRIPTION index 8d2cd9ab22d..d78726612f1 100644 --- a/base/logger/DESCRIPTION +++ b/base/logger/DESCRIPTION @@ -11,5 +11,5 @@ Suggests: testthat License: BSD_3_clause + file LICENSE Encoding: UTF-8 LazyData: true -RoxygenNote: 7.0.2 +RoxygenNote: 7.1.0 Roxygen: list(markdown = TRUE) diff --git a/base/remote/DESCRIPTION b/base/remote/DESCRIPTION index f7a1174e619..7e0fd00bcbc 100644 --- a/base/remote/DESCRIPTION +++ b/base/remote/DESCRIPTION @@ -21,4 +21,4 @@ License: BSD_3_clause + file LICENSE Encoding: UTF-8 LazyData: true Roxygen: list(markdown = TRUE) -RoxygenNote: 7.0.2 +RoxygenNote: 7.1.0 diff --git a/base/settings/R/check.all.settings.R b/base/settings/R/check.all.settings.R index c8e12950de0..44c5dd7465c 100644 --- a/base/settings/R/check.all.settings.R +++ b/base/settings/R/check.all.settings.R @@ -835,7 +835,7 @@ check.model.settings <- function(settings, dbcon = NULL) { con = dbcon) if (nrow(model) > 1) { PEcAn.logger::logger.warn( - "multiple records for", settings$model$name, + "multiple records for", settings$model$type, "returned; using the latest") row <- which.max(model$updated_at) if (length(row) == 0) row <- nrow(model) diff --git a/base/utils/R/sensitivity.R b/base/utils/R/sensitivity.R index 72f441fb3e8..da87d48a069 100644 --- a/base/utils/R/sensitivity.R +++ b/base/utils/R/sensitivity.R @@ -261,7 +261,7 @@ write.sa.configs <- function(defaults, quantile.samples, settings, model, settings$run$site$id, "', '", settings$run$start.date, "', '", settings$run$end.date, "', '", - settings$run$outdir, "', '", + settings$run$outdir, "', ", ensemble.id, ", '", paramlist, "') RETURNING id"), con = con) run.id <- insert_result[["id"]] @@ -270,8 +270,7 @@ write.sa.configs <- function(defaults, quantile.samples, settings, model, for (pft in defaults) { PEcAn.DB::db.query(paste0("INSERT INTO posteriors_ensembles 
(posterior_id, ensemble_id) values (", pft$posteriorid, ", ", - ensemble.id, ", '", - "');"), con = con) + ensemble.id, ");"), con = con) } # associate inputs with runs diff --git a/base/workflow/inst/batch_run.R b/base/workflow/inst/batch_run.R index 9a5c561fe21..63940dae476 100755 --- a/base/workflow/inst/batch_run.R +++ b/base/workflow/inst/batch_run.R @@ -50,7 +50,6 @@ php_file <- file.path(pecan_path, "web", "config.php") stopifnot(file.exists(php_file)) config.list <- PEcAn.utils::read_web_config(php_file) bety <- PEcAn.DB::betyConnect(php_file) -con <- bety$con # Create outfile directory if it doesn't exist dir.create(dirname(outfile), recursive = TRUE, showWarnings = FALSE) @@ -73,7 +72,7 @@ for (i in seq_len(nrow(input_table))) { revision <- table_row$revision message("Model: ", shQuote(model)) message("Revision: ", shQuote(revision)) - model_df <- tbl(con, "models") %>% + model_df <- tbl(bety, "models") %>% filter(model_name == !!model, revision == !!revision) %>% collect() diff --git a/base/workflow/inst/permutation_tests.R b/base/workflow/inst/permutation_tests.R index 69af5f96598..5821ea28980 100755 --- a/base/workflow/inst/permutation_tests.R +++ b/base/workflow/inst/permutation_tests.R @@ -31,7 +31,6 @@ php_file <- file.path(pecan_path, "web", "config.php") stopifnot(file.exists(php_file)) config.list <- PEcAn.utils::read_web_config(php_file) bety <- PEcAn.DB::betyConnect(php_file) -con <- bety$con # Create path for outfile dir.create(dirname(outfile), showWarnings = FALSE, recursive = TRUE) diff --git a/book_source/02_demos_tutorials_workflows/01_install_pecan.Rmd b/book_source/02_demos_tutorials_workflows/01_install_pecan.Rmd index fcc3f4cb37d..9c8801408da 100644 --- a/book_source/02_demos_tutorials_workflows/01_install_pecan.Rmd +++ b/book_source/02_demos_tutorials_workflows/01_install_pecan.Rmd @@ -80,7 +80,7 @@ This will not go into much detail about about how to use Docker -- for more deta This should print the current version of 
docker-compose. We have tested the instruction below with versions of docker-compose 1.22 and above. -3. **Download the PEcAn docker-compose file**. It is located in the root directory of the [PEcAn source code](https://github.com/pecanproject/pecan). For reference, here are direct links to the [latest stable version](https://raw.githubusercontent.com/PecanProject/pecan/master/docker-compose.yml) and the [bleeding edge development version](https://raw.githubusercontent.com/PecanProject/pecan/master/docker-compose.yml). (To download the files, you should be able to right click the link and select "Save link as".) Make sure the file is saved as `docker-compose.yml` in a directory called `pecan`. +3. **Download the PEcAn docker-compose file**. It is located in the root directory of the [PEcAn source code](https://github.com/pecanproject/pecan). For reference, here are direct links to the [latest stable version](https://raw.githubusercontent.com/PecanProject/pecan/master/docker-compose.yml) and the [bleeding edge development version](https://raw.githubusercontent.com/PecanProject/pecan/develop/docker-compose.yml). (To download the files, you should be able to right click the link and select "Save link as".) Make sure the file is saved as `docker-compose.yml` in a directory called `pecan`. 4. **Initialize the PEcAn database and data images**. The following `docker-compose` commands are used to download all the data PEcAn needs to start working. For more on how they work, see our [Docker topical pages](#pecan-docker-quickstart-init). @@ -99,22 +99,25 @@ This will not go into much detail about about how to use Docker -- for more deta b. "Initialize" the data for the PEcAn database. ```bash - docker-compose run --rm bety initialize + docker run --rm --network pecan_pecan pecan/db ``` This should produce a lot of output describing the database operations happening under the hood. - Some of these will look like errors (including starting with `ERROR`), but _this is normal_. 
- This command succeeded if the output ends with the following: + Some of these will look like errors, but _this is normal_. + This command succeeded if the output ends with the following (the syntax for creating a new user for accessing BetyDB): ``` - Added carya41 with access_level=4 and page_access_level=1 with id=323 - Added carya42 with access_level=4 and page_access_level=2 with id=325 - Added carya43 with access_level=4 and page_access_level=3 with id=327 - Added carya44 with access_level=4 and page_access_level=4 with id=329 - Added guestuser with access_level=4 and page_access_level=4 with id=331 + docker-compose run bety user 'login' 'password' 'full name' 'email' 1 1 ``` - - c. Download and configure the core PEcAn database files. + c. Add a user to BetyDB using the example syntax provided as the last line of the output of the previous step: + ```bash + # guest user + docker-compose run --rm bety user guestuser guestuser "Guest User" guestuser@example.com 4 4 + + # example user + docker-compose run --rm bety user carya illinois "Carya Demo User" carya@example.com 1 1 + ``` + d. Download and configure the core PEcAn database files. ```bash docker run -ti --rm --network pecan_pecan --volume pecan_pecan:/data --env FQDN=docker pecan/data:develop @@ -128,7 +131,16 @@ This will not go into much detail about about how to use Docker -- for more deta Done! ###################################################################### ``` + e. Download the [`pecan/docker/env.example`](https://raw.githubusercontent.com/PecanProject/pecan/develop/docker/env.example) & save it as `.env` file. + Now, open the `.env` file & uncomment the lines mentioned below: + ```{r, echo=FALSE, fig.align='center'} + knitr::include_graphics(rep("figures/env-file.PNG")) + ``` + + Setting `PECAN_VERSION=develop` indicates that you want to run the bleeding-edge `develop` branch, meaning it may have bugs. 
To go ahead with the stable version you may set `PECAN_VERSION=latest` or `PECAN_VERSION=` (For example `1.7.0`). You can look at the list of all the [releases](https://github.com/pecanproject/pecan/releases) of PEcAn to see what options are available. + + 5. **Start the PEcAn stack**. Assuming all of the steps above completed successfully, start the full stack by running the following shell command: ```bash diff --git a/book_source/03_topical_pages/07_remote_access/01_pecan_api.Rmd b/book_source/03_topical_pages/07_remote_access/01_pecan_api.Rmd new file mode 100644 index 00000000000..4a69f62d495 --- /dev/null +++ b/book_source/03_topical_pages/07_remote_access/01_pecan_api.Rmd @@ -0,0 +1,475 @@ +# PEcAn Project API + +## Introduction + +##### Welcome to the PEcAn Project API Documentation. + +The Predictive Ecosystem Analyser (PEcAn) Project is an open source framework initiated to meet the demands for more accessible, transparent & repeatable modeling of ecosystems. PEcAn can be considered as an ecoinformatics toolbox combined with a set of workflows that wrap around ecosystem models that allow users to effectively perform data synthesis, propagation of uncertainty through a model & ecological predictions in an integrated fashion using a diverse repository of data & models. + +Our API allows users to remotely interact with the PEcAn servers and leverage the functionalities provided by the PEcAn Project. It has been designed to follow common RESTful API conventions. Most operations are performed using the HTTP methods: `GET` (retrieve) & `POST` (create). + +_Please note that the PEcAn Project API is currently under active development and it is possible that any information in this document is subject to change._ + +## Authentication + +Authentication to the PEcAn API occurs via [Basic HTTP Auth](https://en.wikipedia.org/wiki/Basic_access_authentication). The credentials for using the API are the same as those used to log into PEcAn & BetyDB. 
Here is how you use basic HTTP auth with `curl`: +``` +$ curl --user ':' +``` + +Authentication also depends on the PEcAn server that the user interacts with. Some servers, at the time of deployment have the `AUTH_REQ = FALSE`, meaning that such servers do not require user authentication for the usage of the PEcAn APIs. Regardless of the type of server, the endpoints defined under the General section can be accessed without any authentication. + +## RESTful API Endpoints + +This page contains the high-level overviews & the functionalities offered by the different RESTful endpoints of the PEcAn API. + +__For the most up-to-date documentation, you can visit the [PEcAn API Documentation](https://petstore.swagger.io/?url=https://raw.githubusercontent.com/tezansahu/pecan/api_1/apps/api/pecanapi-spec.yml).__ + +The currently implemented functionalities include: + +* __General:__ + * `GET /api/ping`: Ping the server to check if it is live + * `GET /api/status`: Obtain general information about PEcAn & the details of the database host + +* __Models:__ + * `GET /api/models/`: Retrieve the details of model(s) used by PEcAn + +* __Workflows:__ + * `GET /api/workflows/`: Retrieve a list of PEcAn workflows + * `POST /api/workflows/` *: Submit a new PEcAn workflow + * `GET /api/workflows/{id}`: Obtain the details of a particular PEcAn workflow by supplying its ID + +* __Runs:__ + * `GET /api/runs`: Get the list of all the runs for a specified PEcAn Workflow + * `GET /api/runs/{id}`: Fetch the details of a specified PEcAn run + +_* indicates that the particular API is under development & may not be ready for use_ + + +## Examples: + +#### Prerequisites to interact with the PEcAn API Server {.tabset .tabset-pills} + +##### R Packages +* [httr](https://cran.r-project.org/web/packages/httr/index.html) +* [jsonlite](https://cran.r-project.org/web/packages/jsonlite/index.html) + +##### Python Packages +* [requests](https://requests.readthedocs.io/en/master/) +* 
[json](https://docs.python.org/3/library/json.html) + +#### {-} + + +Following are some example snippets to call the PEcAn API endpoints: + +### `GET /api/ping` {.tabset .tabset-pills} + +#### R Snippet + +```R +res <- httr::GET("http://localhost:8000/api/ping") +print(jsonlite::fromJSON(rawToChar(res$content))) +``` +``` +## $request +## [1] "ping" + +## $response +## [1] "pong" +``` +#### Python Snippet + +```python +response = requests.get("http://localhost:8000/api/ping") +print(json.dumps(response.json(), indent=2)) +``` +``` +## { +## "request": "ping", +## "response": "pong" +## } +``` +### {-} + + +### `GET /api/status` {.tabset .tabset-pills} + +#### R Snippet + +```R +res <- httr::GET("http://localhost:8000/api/status") +print(jsonlite::fromJSON(rawToChar(res$content))) +``` +``` +## $pecan_details$version +## [1] "1.7.0" + +## $pecan_details$branch +## [1] "develop" + +## $pecan_details$gitsha1 +## [1] "unknown" + +## $host_details$hostid +## [1] 99 + +## $host_details$hostname +## [1] "" + +## $host_details$start +## [1] 99000000000 + +## $host_details$end +## [1] 99999999999 + +## $host_details$sync_url +## [1] "" + +## $host_details$sync_contact +## [1] "" +``` + +#### Python Snippet + +```python +response = requests.get("http://localhost:8000/api/status") +print(json.dumps(response.json(), indent=2)) +``` +``` +## { +## "pecan_details": { +## "version": "1.7.0", +## "branch": "develop", +## "gitsha1": "unknown" +## }, +## "host_details": { +## "hostid": 99, +## "hostname": "", +## "start": 99000000000, +## "end": 99999999999, +## "sync_url": "", +## "sync_contact": "" +## } +## } +``` + +### {-} + +### `GET /api/models/` {.tabset .tabset-pills} + +#### R Snippet + +```R +# Get model(s) with `model_name` = SIPNET & `revision` = ssr +res <- httr::GET( + "http://localhost:8000/api/models/?model_name=SIPNET&revision=ssr", + authenticate("carya", "illinois") + ) +print(jsonlite::fromJSON(rawToChar(res$content))) +``` +``` +## $models +## model_id 
model_name revision modeltype_id model_type +## 1 1000000022 SIPNET ssr 3 SIPNET +## ... +``` + +#### Python Snippet + +```python +# Get model(s) with `model_name` = SIPNET & `revision` = ssr +response = requests.get( + "http://localhost:8000/api/models/?model_name=SIPNET&revision=ssr", + auth=HTTPBasicAuth('carya', 'illinois') + ) +print(json.dumps(response.json(), indent=2)) +``` +``` +## { +## "models": [ +## { +## "model_id": "1000000022", +## "model_name": "SIPNET", +## "revision": "ssr", +## "modeltype_id": 3, +## "model_type": "SIPNET" +## }, +## ... +## ] +## } +``` + +### {-} + +### `GET /api/workflows/` {.tabset .tabset-pills} + +#### R Snippet + +```R +# Get workflow(s) that use `model_id` = 1000000022 [SIPNET] & `site_id` = 676 [Willow Creek (US-WCr)] +res <- httr::GET( + "http://localhost:8000/api/workflows/?model_id=1000000022&site_id=676", + authenticate("carya", "illinois") + ) +print(jsonlite::fromJSON(rawToChar(res$content))) +``` +``` +## $workflows +## id properties +## +## 1 1000009172 +## ... + +## $count +## [1] 5 +``` + +#### Python Snippet + +```python +# Get workflow(s) that use `model_id` = 1000000022 [SIPNET] & `site_id` = 676 [Willow Creek (US-WCr)] +response = requests.get( + "http://localhost:8000/api/workflows/?model_id=1000000022&site_id=676", + auth=HTTPBasicAuth('carya', 'illinois') + ) +print(json.dumps(response.json(), indent=2)) +``` +``` +## { +## "workflows": [ +## { +## "id": 1000009172, +## "properties": { +## "end": "2004/12/31", +## "pft": [ +## "soil.IF", +## "temperate.deciduous.IF" +## ], +## "email": "", +## "notes": "", +## "start": "2004/01/01", +## "siteid": "676", +## "modelid": "1000000022", +## "hostname": "test-pecan.bu.edu", +## "sitename": "WillowCreek(US-WCr)", +## "input_met": "AmerifluxLBL.SIPNET", +## "pecan_edit": "on", +## "sitegroupid": "1000000022", +## "fluxusername": "pecan", +## "input_poolinitcond": "-1" +## } +## }, +## ... 
+## ], +## "count": 5 +## } +``` + +### {-} + +### `GET /api/workflows/{id}` {.tabset .tabset-pills} + +#### R Snippet + +```R +# Get details of workflow with `id` = '1000009172' +res <- httr::GET( + "http://localhost:8000/api/workflows/1000009172", + authenticate("carya", "illinois") + ) +print(jsonlite::fromJSON(rawToChar(res$content))) +``` +``` +## $id +## [1] "1000009172" + +## $properties +## $properties$end +## [1] "2004/12/31" + +## $properties$pft +## $properties$pft[[1]] +## [1] "soil.IF" + +## $properties$pft[[2]] +## [1] "temperate.deciduous.IF" + + +## $properties$email +## [1] "" + +## $properties$notes +## [1] "" + +## $properties$start +## [1] "2004/01/01" + +## $properties$siteid +## [1] "676" + +## $properties$modelid +## [1] "1000000022" + +## $properties$hostname +## [1] "test-pecan.bu.edu" + +## $properties$sitename +## [1] "WillowCreek(US-WCr)" + +## $properties$input_met +## [1] "AmerifluxLBL.SIPNET" + +## $properties$pecan_edit +## [1] "on" + +## $properties$sitegroupid +## [1] "1000000022" + +## $properties$fluxusername +## [1] "pecan" + +## $properties$input_poolinitcond +## [1] "-1" +``` + +#### Python Snippet + +```python +# Get details of workflow with `id` = '1000009172' +response = requests.get( + "http://localhost:8000/api/workflows/1000009172", + auth=HTTPBasicAuth('carya', 'illinois') + ) +print(json.dumps(response.json(), indent=2)) +``` +``` +## { +## "id": "1000009172", +## "properties": { +## "end": "2004/12/31", +## "pft": [ +## "soil.IF", +## "temperate.deciduous.IF" +## ], +## "email": "", +## "notes": "", +## "start": "2004/01/01", +## "siteid": "676", +## "modelid": "1000000022", +## "hostname": "test-pecan.bu.edu", +## "sitename": "WillowCreek(US-WCr)", +## "input_met": "AmerifluxLBL.SIPNET", +## "pecan_edit": "on", +## "sitegroupid": "1000000022", +## "fluxusername": "pecan", +## "input_poolinitcond": "-1" +## } +## } +``` + +### {-} + +### `GET /api/runs/` {.tabset .tabset-pills} + +#### R Snippet + +```R +# Get list of 
runs belonging to the workflow with `workflow_id` = '1000009172' +res <- httr::GET( + "http://localhost:8000/api/runs/?workflow_id=1000009172", + authenticate("carya", "illinois") + ) +print(jsonlite::fromJSON(rawToChar(res$content))) +``` +``` +## $runs +## runtype ensemble_id workflow_id id model_id site_id parameter_list start_time +## finish_time +## 1 ensemble 1000017624 1000009172 1002042201 1000000022 796 ensemble=1 2005-01-01 +## 00:00:00 2011-12-31 00:00:00 +## ... + +## $count +## [1] 50 +``` + +#### Python Snippet + +```python +# Get list of runs belonging to the workflow with `workflow_id` = '1000009172' +response = requests.get( + "http://localhost:8000/api/runs/?workflow_id=1000009172", + auth=HTTPBasicAuth('carya', 'illinois') + ) +print(json.dumps(response.json(), indent=2)) +``` +``` +## { +## "runs": [ +## { +## "runtype": "ensemble", +## "ensemble_id": 1000017624, +## "workflow_id": 1000009172, +## "id": 1002042201, +## "model_id": 1000000022, +## "site_id": 796, +## "parameter_list": "ensemble=1", +## "start_time": "2005-01-01", +## "finish_time": "2011-12-31" +## }, +## ... 
+## ], +## "count": 50, +## "next_page": "http://localhost:8000/api/workflows/?workflow_id=1000009172&offset=50&limit=50" +## } +``` + +### {-} + +### `GET /api/runs/{id}` {.tabset .tabset-pills} + +#### R Snippet + +```R +# Get details of run with `id` = '1002042201' +res <- httr::GET( + "http://localhost:8000/api/runs/1002042201", + authenticate("carya", "illinois") + ) +print(jsonlite::fromJSON(rawToChar(res$content))) +``` +``` +## runtype ensemble_id workflow_id id model_id site_id start_time finish_time parameter_list +## 1 ensemble 1000017624 1000009172 1002042201 1000000022 796 2005-01-01 2011-12-31 ensemble=1 +## created_at updated_at started_at finished_at +## 1 2018-04-11 22:20:31 2018-04-11 22:22:08 2018-04-11 18:21:57 2018-04-11 18:22:08 +``` + +#### Python Snippet + +```python +# Get details of run with `id` = '1002042201' +response = requests.get( + "http://localhost:8000/api/runs/1002042201", + auth=HTTPBasicAuth('carya', 'illinois') + ) +print(json.dumps(response.json(), indent=2)) +``` +``` +## [ +## { +## "runtype": "ensemble", +## "ensemble_id": 1000017624, +## "workflow_id": 1000009172, +## "id": 1002042201, +## "model_id": 1000000022, +## "site_id": 796, +## "parameter_list": "ensemble=1", +## "start_time": "2005-01-01", +## "finish_time": "2011-12-31" +## } +## ] +``` + +### {-} diff --git a/book_source/03_topical_pages/09_standalone_tools.Rmd b/book_source/03_topical_pages/09_standalone_tools.Rmd index a4d6690af56..006d90c7653 100644 --- a/book_source/03_topical_pages/09_standalone_tools.Rmd +++ b/book_source/03_topical_pages/09_standalone_tools.Rmd @@ -125,7 +125,7 @@ input.id = tbl(bety,"inputs") %>% filter(name == input_name) %>% pull(id) data.path = PEcAn.DB::query.file.path( input.id = input.id, host_name = PEcAn.remote::fqdn(), - con = bety$con) + con = bety) ``` 6. 
Load the data diff --git a/book_source/03_topical_pages/11_adding_to_pecan.Rmd b/book_source/03_topical_pages/11_adding_to_pecan.Rmd index c4829297eb5..9c5d0051121 100644 --- a/book_source/03_topical_pages/11_adding_to_pecan.Rmd +++ b/book_source/03_topical_pages/11_adding_to_pecan.Rmd @@ -382,7 +382,7 @@ PEcAn.data.atmosphere::met2CF.csv(in.path = in.path, Vegetation data will be required to parameterize your model. In these examples we will go over how to produce a standard initial condition file. -The main function to process cohort data is the `ic.process.R` function. As of now however, if you require pool data you will run a separate function, `pool_ic_list2netcdf.R`. +The main function to process cohort data is the `ic_process.R` function. As of now however, if you require pool data you will run a separate function, `pool_ic_list2netcdf.R`. ###### Example 1: Processing Veg data from data in hand. @@ -392,7 +392,7 @@ First, you'll need to create a input record in BETY that will have a file record Once you have created an input record you must take note of the input id of your record. An easy way to take note of this is in the URL of the BETY webpage that shows your input record. In this example we use an input record with the id `1000013064` which can be found at this url: https://psql-pecan.bu.edu/bety/inputs/1000013064# . Note that this is the Boston University BETY database. If you are on a different machine, your url will be different. -With the input id in hand you can now edit a pecan XML so that the PEcAn function `ic.process` will know where to look in order to process your data. The `inputs` section of your pecan XML will look like this. As of now ic.process is set up to work with the ED2 model so we will use ED2 settings and then grab the intermediary Rds data file that is created as the standard PEcAn file. For your Inputs section you will need to input your input id wherever you see the `useic` flag. 
+With the input id in hand you can now edit a pecan XML so that the PEcAn function `ic_process` will know where to look in order to process your data. The `inputs` section of your pecan XML will look like this. As of now ic_process is set up to work with the ED2 model so we will use ED2 settings and then grab the intermediary Rds data file that is created as the standard PEcAn file. For your Inputs section you will need to input your input id wherever you see the `useic` flag. ``` @@ -496,12 +496,12 @@ Once you edit your PEcAn.xml you can than create a settings object using PEcAn f settings <- PEcAn.settings::read.settings("pecan.xml") settings <- PEcAn.settings::prepare.settings(settings, force=FALSE) ``` -You can then execute the `ic.process` function to convert data into a standard Rds file: +You can then execute the `ic_process` function to convert data into a standard Rds file: ``` input <- settings$run$inputs dir <- "." -ic.process(settings, input, dir, overwrite = FALSE) +ic_process(settings, input, dir, overwrite = FALSE) ``` Note that the argument `dir` is set to the current directory. You will find the final ED2 file there. More importantly though you will find the `.Rds ` file within the same directory. 
diff --git a/book_source/03_topical_pages/94_docker/02_quickstart.Rmd b/book_source/03_topical_pages/94_docker/02_quickstart.Rmd index d23ed2bf550..552da07e98d 100644 --- a/book_source/03_topical_pages/94_docker/02_quickstart.Rmd +++ b/book_source/03_topical_pages/94_docker/02_quickstart.Rmd @@ -1,4 +1,37 @@ -## The PEcAn docker install process in detail {#docker-quickstart} +## Quick-start docker install {#docker-quickstart} + +```bash +git clone git@github.com:pecanproject/pecan +cd pecan + +# start database +docker-compose -p pecan up -d postgres + +# add example data (first time only) +docker-compose run --rm bety initialize +docker run -ti --rm --network pecan_pecan --volume pecan_pecan:/data --env FQDN=docker pecan/data:develop + +# start PEcAn +docker-compose -p pecan up -d + +# run a model +curl -v -X POST \ + -F 'hostname=docker' \ + -F 'modelid=5000000002' \ + -F 'sitegroupid=1' \ + -F 'siteid=772' \ + -F 'sitename=Niwot Ridge Forest/LTER NWT1 (US-NR1)' \ + -F 'pft[]=temperate.coniferous' \ + -F 'start=2004/01/01' \ + -F 'end=2004/12/31' \ + -F 'input_met=5000000005' \ + -F 'email=' \ + -F 'notes=' \ + 'http://localhost:8000/pecan/04-runpecan.php' +``` + + +## The PEcAn docker install process in detail ### Configure docker-compose {#pecan-setup-compose-configure} @@ -65,48 +98,49 @@ As a side effect, the above command will also create blank data ["volumes"](http Because our project is called `pecan` and `docker-compose.yml` describes a network called `pecan`, the resulting network is called `pecan_pecan`. This is relevant to the following commands, which will actually initialize and populate the BETY database. 
-Assuming the above ran successfully, next run the following: +Assuming the above has run successfully, next run the following: ```bash -docker-compose run --rm bety initialize +docker run --rm --network pecan_pecan pecan/db ``` The breakdown of this command is as follows: {#docker-run-init} -- `docker-compose run` -- This says we will be running a specific command inside the target service (bety in this case). +- `docker run` -- This says we will be running a container. - `--rm` -- This automatically removes the resulting container once the specified command exits, as well as any volumes associated with the container. This is useful as a general "clean-up" flag for one-off commands (like this one) to make sure you don't leave any "zombie" containers or volumes around at the end. -- `bety` -- This is the name of the service in which we want to run the specified command. -- Everything after the service name (here, `bety`) is interpreted as an argument to the image's specified [entrypoint](https://docs.docker.com/engine/reference/builder/#entrypoint). For the `bety` service, the entrypoint is the script [`docker/entrypoint.sh`](https://github.com/PecanProject/bety/blob/master/docker/entrypoint.sh) located in the BETY repository. Here, the `initialize` argument is parsed to mean "Create a new database", which first runs `psql` commands to create the `bety` role and database and then runs the `load.bety.sh` script. - - NOTE: The entrypoint script that is used is the one copied into the Docker container at the time it was built, which, depending on the indicated image version and how often images are built on Docker Hub relative to updates to the source, may be older than whatever is in the source code. +- `--network pecan_pecan` -- This will start the container in the same network space as the postgres container, allowing it to push data into the database. 
+- `pecan/db` -- This is the name of the container, this holds a copy of the database used to initialize the postgresql database. Note that this command may throw a bunch of errors related to functions and/or operators already existing. This is normal -- it just means that the PostGIS extension to PostgreSQL is already installed. The important thing is that you see output near the end like: ``` -CREATED SCHEMA -Loading schema_migrations : ADDED 61 -Started psql (pid=507) -Updated formats : 35 (+35) -Fixed formats : 46 -Updated machines : 23 (+23) -Fixed machines : 24 -Updated mimetypes : 419 (+419) -Fixed mimetypes : 1095 -... -... -... -Added carya41 with access_level=4 and page_access_level=1 with id=323 -Added carya42 with access_level=4 and page_access_level=2 with id=325 -Added carya43 with access_level=4 and page_access_level=3 with id=327 -Added carya44 with access_level=4 and page_access_level=4 with id=329 -Added guestuser with access_level=4 and page_access_level=4 with id=331 +---------------------------------------------------------------------- +Safety checks + +---------------------------------------------------------------------- + +---------------------------------------------------------------------- +Making sure user 'bety' exists. ``` If you do not see this output, you can look at the [troubleshooting](#docker-quickstart-troubleshooting) section at the end of this section for some troubleshooting tips, as well as some solutions to common problems. Once the command has finished successfully, proceed with the next step which will load some initial data into the database and place the data in the docker volumes. 
+#### Add first user to PEcAn database + +You can add an initial user to the BETY database, for example the following commands will add the guestuser account as well as the demo `carya` account: + +``` +# guest user +docker-compose run --rm bety user guestuser guestuser "Guest User" guestuser@example.com 4 4 + +# example user +docker-compose run --rm bety user carya illinois "Carya Demo User" carya@example.com 1 1 +``` + #### Add example data (first time only) {#pecan-docker-quickstart-init-data} The following command will add some initial data to the PEcAn stack and register the data with the database. diff --git a/book_source/Makefile b/book_source/Makefile index 5d57f169a91..1411d6fcd75 100755 --- a/book_source/Makefile +++ b/book_source/Makefile @@ -23,7 +23,10 @@ DEMO_1_FIGS := $(wildcard ../documentation/tutorials/01_Demo_Basic_Run/extfiles/ build: bkdcheck mkdir -p extfiles cp -f ${DEMO_1_FIGS} extfiles/ - Rscript -e 'bookdown::render_book("index.Rmd", "bookdown::gitbook")' + # options call is a workaround for a behavior change and probable bug in bookdown 0.20: + # https://stackoverflow.com/a/62583304 + # Remove when this is fixed in Bookdown + Rscript -e 'options(bookdown.render.file_scope=FALSE); bookdown::render_book("index.Rmd", "bookdown::gitbook")' clean: rm -rf ../book/* @@ -32,4 +35,4 @@ deploy: build ./deploy.sh pdf: bkdcheck - Rscript -e 'bookdown::render_book("index.Rmd", "bookdown::pdf_book")' + Rscript -e 'options(bookdown.render.file_scope=FALSE); bookdown::render_book("index.Rmd", "bookdown::pdf_book")' diff --git a/book_source/figures/env-file.PNG b/book_source/figures/env-file.PNG new file mode 100644 index 0000000000000000000000000000000000000000..f1ad8553b90458e1b4445b0767c4f304f5d3b89b GIT binary patch literal 92110 zcmb@tRZtzz7c~k&g1c*gUXXAc@v9mYj+>YKZkl$#RVrZrWoNWi_Q#KU)JSp4MCnPaf6rPU z=dZsSM2Rr|GuoHllF3Pc|IhQQ-~V@qKDIAixv}cx3CZ?UIv%qk zfulDs_Q(>gv?6de+)wD-7cBTX>nbJekYB84t?L(W{61JNjK0T76fehCqT1{|3UUlw 
z&gWWvbW=axC4YqqI3Yl~yO)DlCit3(k35${u&qkz?0@w_43HSgA|E<;JcYDrE^0Q3sZ5D9~GZIE4vX>KjJ;1b2=|5sG!g{DO1wa zPZb9r=@_>!JYF0(w~r?`n2&-G&}vY&^}3~dIDA$cr`b@u{no?VH1C}nD;}0$z6;hH ziXo*uwia5J#Q7M`m<+l}#Kt)_FcE8ed~vjD3y~E7gtqt=ry2ukc zqIUg0%=8ESbRvK2@yJx|AV^X5(6iJ4`wcB78#lCbCEn>>5E|VHNQ^C#SI1bO`RZdsie?th<;{5>Hs0ptlw}8f~LKp?<;Tt6d5=>RJc8DOlZJ zH;1utof6l})-_uFcFU|d4$J#h7srEAp0oPHcHHe}Wr?hJG zn=kZrjx13w+b-VI?lP}tMZO+a39DmFP-nXrid4fKKqmfL}ndbK-i50U5K!#o<5a|;qi|tcsVhGw*WN6+h zuoMwbPs_%XH(4hXV?;52GnMGC5Lo+R70<1rGW?SlZ-U}}@hlmQl{L~!3TN>k*N`b6 zW~N7YJk2z}+z_~vQR&Pa(RI%g*YH%$ntV{8Z@ zs?Bksqlh$99Uo+VngSkLYtEdFI6RaRD5^i#3NSMOmY2ksV9QA2NcVx+m=geYwiK7r zj^Woul77vVp|5dkUnWyr8hl78*U>&a&Mu5qpDze9fl5#R@vPnu%xqyc+h)LS@~QS{Ip>r?s?Y-o+N1r{u&*S>II$7F#EF>-l0}dE z?Nss05stHlU!ZYVXu5t9WD!0yPRp_>lx_jYeSB6@u1qMQxd#N<;Ud&U-3rJ5pxCq}T$2LX|V4xY1;>z#R!!MC1PIdo8C?cYk%_p+PKWEy5g} z)xaaR6n76;3#NHD|Aeb;-HH7U8LJK2WVI<CLF7M;VYlSQ8C!%Kv{$(J0G$l%Q0{2w=x9>af;@eXis&Bl?<^4BB>`NSZ-A;3^~WRl(Mb$z!f3cE(eP8 zbH#F<>9-!pPU7xubQ|?E1NtCtMDz8aQxZMQ$TN|i4o;^rnrzU53v--t!dC44ZZa;e zeEg2vlp_-PT! zOVukCd0A$SZp|yz*!Ge!Vd{T{T>Not4As+hOt+(>trrK=;2Br_2vNgrA>|eQEyb$6 zVQYZug31??V`CARoD4@32y?Yu@%^?4FXw#eqMX&%ax3khy(TQ3@ao2x|BVRliz3F;FZeai7ax9bj8*e5tzv>#7bZ;m`2O9>-pse?+b{{Tz`4f z*nC4V%62dj0E-XH|0OejV==-Oqq;s9KzrFNCiul$Gj@>ddrXP!Bhku97I~@WumAZ~ zsvdMLZ(mb2%>eL8WQaHTM%vgY z+Dmh%W+rq~#R>$%s{JYjr*Z$@z(;9hbCrcP0y&u>6&w^n6!a&3L(4#B$^TIGwN~!S zTP*1<7)W9tF~_GgK&?U%8!Ut-VtMNiqkmLfJzcnji;?+KeZ$wh{N_+i%-?YpIV$Q( zKjl~!q|dVrkF>}#i{h>Ybc)q-1))n*qntn40i>Y|^8x5&p>;Va8lObZO*o%hI z`%9;$i#XVzTfS!uG>$GvHqaaZ0aGaQvm)7AKIhXeIL1d@UqugPqJMZPf7;gi(Jm{^4K;)&c?|Jl+oe8#EL)zpex!q9X3C-f zZkHY_jwtMGH~VIE0Tg0<1$I_jJX*XJqwzu3*~|m*v72BmDDewbgOAZ1hNaQqxbG99 z2->vEOFx8!fti9~=&pebo;@pu#5CWs0o~{DPvfjN@{%>6lf|1edhuPLCW2BRoQwru zO^KJi$eoSvyTp=cR~+3k*XlKBh)#C!5`&iW+NkGF3oUx%1qjy8#T!WBo)mlerzSJ$~(ydk+xT z@oTBoPUAH~LNC`}Y&@`TCH>D+r+wZiZt%j@5SOrh{@bG0z&{E;k_Fkx!J>r_;A=2J zy|waltk4=5OW5fM1F_G*k;geSlh2VncLQo|!{`ra&IM zwU<=qSKo6r3je;eaXR@02YE*5Gd~rV{0^>|8zxE4RFtMV&=rQubtEp)9+FPOBGd5! 
z$|4@x*-f6|*E%DltZ*O0T(N*)G%C{K5m$NUBA}lKM;6jvz>5Q_8sQ^=WC2fcc#I_` zUxbPj^AL%&C2(f0V{FYUYr@)5n5bpJ6PoUolruu{S<~{v_lM@;4+`PMe3R4eCuJ|CB(b^^}NILe$%V$e%<%2)D!H zjxJngQ;Ip%3&B?l-}Gb7JHAZGPz@;%l#A5NvjRa|pnAPJ8JA?WkeN-t_?FZ6leNee zr}%bH6e1P1kgFGu1?YcjMEbm-8`CY4q7vWA)NAzoBhOdF8_Ct-D5N?~FnU1NCfzo1 zcY4IV5c*zB-bK4q63xnepNr#j_f<{g!%W5U=VV2aJXQkEtdNaC2~&7uX3zsXDJdH> z_t@#_&1g8q)5?#&p}`pTJuAA0=G0Z(+ep2>#XH++o8xva#o6?J?xz-Q0XO3Jv)o`@ z1ist(eMQfX$&SkxQ6K^pHAdL?N`@InuyN!RVg5Az3IBpUo;gD6mCak z-932jo!VOL{3I#vKbiFYydl2Y9y~c#&)T)KmN4xu->>9E*?&Vw8uw zrciT-KJ^GpkiF4*X}%;;PfoJp+eh<5#Do8>D}B#VY5tWpA$#DB>4y6dI!y@S$`w}C z9>w(SZ(yuHr~7Mti!_1G4GZ2rhr`zyJIwv(V3O1OiXf%Q;)Nc_XFMJ97CUN10oree zH*}Ofev6BirUbZh3NQ)T3h2_wgBJnDBFKD*>NqI45?+Kis>qAo_4@Nl6u)tF55N$K zMaH<(OGR$xRgVR0g6=O-DvV}J@-XL2_XR1~`@`_$qA>YsSyiU}(7@VY+? J`iN zmb!#RIoN;1GB>=SCe}6p3!6$Z(#b{%r3W1q@o%r!ZGwYJ-u08;*m%NR{V5YYeaxe^ zpDEn{SoB&zd#|9_>^6XQmtp^SIwlH}S-QQHk_X42*Om}!PRqdQF(*IAw(mJdh5C5f z*o%C-Ls!tk`kc~btq6+IFWQPt8J$RJfuxK*3&{W{hkw>Bf}!6+vt(#Z_dbHXJzg2K z2`PZ!+gRktFLsc2quQ$@G`o!)iop*OyAB?EHQZ99yDmS_A)l4 z;~9+kyQivuKq6*nKEhg_7foGPiq8t3gA((WRz))sbRSoFh;zfjtupR^x(5Q}koJBh zYe<+JIn#)dCnUWS_cb6}9Cednd2>BxXIS`Ao$`2)hi^L5~xg6=Z0>5*+>Yiu5;H06_OXvP+{|+lIC)cJuZg`UpuRcyDruLN+eyp|H z$a^fDf!W!D-#;PrVnM)iKLl43y57OZYIL-uxB>I<^u^mVNzKg(Q49 zaeO_*{%K#|%Uz(>I+)@3m`2Cb8wOX}NrEG)%p8Z)RUPGLAJMvr>)p5{-1SE!nD*!d za@=nH!yj9u4LRWf+pjs|noPQ^SnlYULDU)sc(27s>E@qXC1n)lnx54tNQ8=W1Alte zzNss1!qnEix&$2&DRPq0G22HSBWs|D4D!8D`r~_a?x^*8bgC8Z)rKik#>y7!wY7*@ zI=yUrvmj96IBL!S6pa#d5$qb2nwSSzU!V#?kDQ`JInQk0m?2xwz2QP3sUAld0-Dr6feWOLfwhyhdz6!%axlO5z#LSY zCVR9iimj^LqGjqSdP#H3bD-s7pw&TyG`wEkm}fsbKCF)OXT53L7pm}6tH1L*d?Nm% z)$NtKbMn3(ze9~WiofTrre-;99*V%#r9}M!Jyz8Q!-w%WfIBk*ydF;ts516iX%@&j zsmlH%2IhVduTBN}ql2NcWE-MRU2TNY#j2B+Z0KN~ps+8g8|d6#OKo{|z7)r>qaCq{ z4QX>f!2=gdGd)7*&g-hJr)67d-t02+yCB@x-FLKE`dW6zn-UmYsm6DxeHHhv$d|8* zeV{r1)?uM?V@CUkWL62Dn6E9$c0T?5Y#`VIZ+BwjZu&a@Ld*8FX{ zf*1A4_JPu<)y?+hE2fr z`>-?-ddAwYC(6jt>jW)L!rR!1+kim7xDDwJ9n6Iu 
z@wS$xX}<8qlB)1h$l`n@!LxT5e&LwS!QkzL`;h>UN_xSK$??Qkq9$gqi*a3F}E|;vgRvfpv5z2DuM&rFLXt+GU0k^COT@SU?NMd@<>Qu%4 z-eJqn3vMr#87`r}K&ujRvR~hO3e`qd{qll5nkwRENYbk|*TKG}!fP6kZtT85-~8^d z8e#gsl{-188h$t)UE zr8TeO;m=#zUG~dr}>(h43x-eicmfy11N0;x$ zU*0Ap!*cfHx!De-gYhi?5drvyOOY(@womz#uF4mE-6r0zTWt}!WADuA(+y;cnfSwI zQXC|cuIOE(9v7G2@h_bGjpkt0gE2yd@b#n_(bF6>i-SM;kR^*qr=ZP-j(LoMV&z-F=T~k1V zL!s#D+JE6JZ#2`ba4>Nd4p*X8xfohWvOkzd{4AK{#PrK@4S-ugA#hyQG;>L|I(Mw>~Uw37Uw z+rJU>OLjijkFcQ(H>>D4QVPsS+Qmj?I8Qay27;4^Tss=3o`pvG-1a5dKq^6Gvx-aP zfb_(P4(J|^ON5Yjc@WkhT#R8@Va4K!1cTKmoL0DrBf@6-DIh}^N8P6xdLNbD`E;CM zC}pLTzml)Hb0UjO?8{i6g&N8Zu!vmn`?-BJbc13)FfSpANuPgYz_3n2f`=+K*=l)b?p7rEuR9|0A_5g&Ih#;}d6q1)c=DJDJ=5B(c z7M4z@xTj6(<^+??$`pmnr!^E&MT}NHjF0ZUr{ajM5aQXhT}Fd;I)h=V?n}MPaXO2zK%t{)J^CoZ({ktPkxky}&3DvUDRo6#Dwbo~Us=R!%&RGK1@A|-%KFb) zb-}s}qRh#mZ9cYflN;?5kLEcHrBW6`#{y$#lzo*Z+lIJ;&eb7C$9{dmg^3N3@BQvh zXuBv;Tm1ZP5V}-|2B^7n$&cIQ>4pxkuF5pN(64>f?`TwVz?nLBw}qW}1>H8w$2=Ay z*b)H&+|gk-f3`{nDmt%flJ632@uJ9>@;@gGJoL<-p9s~J-L z*jgoHaYQ^r1dbi`eb5i278Hy5TM3IHod5dRC%Gt&QRMo&@Ql~Rk<(3CN@P81sb_m{yP-ur-;jyyHhX0R!=$c1~Uo%>A^7!i0ZN z$hWQtip?`S(}-+GFnjx;^&Y-cSNiCBL!5rkxB%7msGpsc(S|0;Pl(lHi2A9Rciq`n zGmA~Ri;JZvOGv#+;$O&*yg$)8bJYoDs1 z&`JneBv1YV$Oyd0;&IiT)fg0;?0X&vB$xQF;V%Z-Ic*KSJ!VUdyZqz`%${^wu8HD~ z>j_d=f5r+4uY$ehTBjBF56jhsb99LMqwqg@J}{BybWsGy7dv*36~he zcNGYrllpCs(#>~}^uVwjw*+qtHV6vUS^Oc7ubfqp3iT$x7P$#SlcFuo&`!wP>a}`J zQ?Q~ufRQa$1%7R(|K9j2X+-GeU!pKoRg#Y#DFLr)yXu}@8nLZgHzD5| zR5S5sMiCM)=sD(<(v$FMvx6Fwv(g1@w>@YAa4%UzPvDs)J;tP_>G~l?u+h=?4M6NR zl7iXj=zgu0jXyMypS@qCxO+RnfLgf)dVo~;OXVTbwgXRf%d|@q)@EZ$J>0k_r z;wC3ssoXk>E|_#d5=xw%8puZVrw!N|XO)a5C9)3e0%D@B5ud%R1Z1=W;a&XW7xdFR zJO)U2_2d4P48bU|p2kx3#Mf|x_T?>EVm3R}NdwHEXsVwlD{|XnqnRU2QBXIp_0+AB zrp}ogFRT*Zzg*NT5MjjMM4Wnbxk{iob(4K7><;2n4-=w@*-q`k5FJ%%Ho`p5n_#Gk z+MXT_OyWN4LkQcQQAr~js-(nCYPD3;!yPZ2k%L^O>Jv$Ng`)@Tfgbw)utlu-G8^E1 zpM&?P{LF%6Mgo>K(d!l`y%>-)t@Py`E%yacZRAxNnmy+jR%{sw_g;GFPQ!5G>^hw3 zuI+MM^3{kaqm$LuI?KAzjL-3?%G3Rvy!QE~{KApXD8 
zh53VLM8juUH=KjE>}hkxTKU>Vrh#Eo=`WosNow{xG%M2i_^G{$dir05!g@Gyx_#*) zK5Nd1=@JjbpP_HAiJd9Ye9mph-@rybc#X@|dg@$hl|ERqa{iT7r(JsRi%uOl*~vhC za}i&8n-}K5>#8oM))o|$4pO&2cQP!X?Ylv}Rad?FZRj-@AH}A!|GiSj0?E+m(7TO1 zP$NP(G9zSv<4?fK!gFo-*~FF^ifPAdSqSFQTc9E`Rc4_Hrte4I$*xaD@V3cjQO1duM5V5Heh8$>BF3}*dXljb^cZkm9ObUo8;PbRc};)ZYi6LMG>oHX zf6sgj?_GyG*WcpuJCakfXPY;mAS`Ohb!oqsInxYMUt zfKcKlBa_qKR(Q$dlJ3CQdzgdS^_Zmyk1ijE%9{76ANv@GWTOTm75^~39?h77hnqUP z81X{I$Mc#@zl?KY8OGwv9&^Yk-MbD#h&$fBxzIW@4Pn@;)B=B#EvQ{-l3yWuWDgoyT-gff_4csVVk# zRGq&|MX*=g++brBAfI!XQuT_trnmZbEAbZT=)mHj(@s-y|EF5SVdV8wblWs2fu{6H}%=4@^twu~_V zePQQ>nu3woGV2!a(*LL&^Ho7Vzz$)fSZ%+gngQ=0F;aFhT5O$fp4cb(nq?`W6jCw5 z*h^zcx$~|=f!(~#cy?RD{=YLt+<*f(i(rIqb6e7lUu&iA1Qxl-xACtc89X z@uvnJM1o=iNoQ8S5$UKeomF z%k+UXX+i%CwtGZ&)xCMAGREb7cVm|F<#G7xQ};y@tl*hJj5|t`HLXZ&%#jcF7`tS) zzy>1!qfyjGZm<7a6#HvDty{8E{N|jR_lW%N_?N{cC{^WyPAf4%zibR6{1*id^^mgM zpPb5%Yp9ITh7Eu~u|qa}{_!d)x`Sm0=n{pEhi+KkH!J^vaO$HhT|}TzGaEbBisj3l zaPzyz8g`FjwFt0kI@M%58A{^BW3VnxK#d<<)60I3ioksFLC5vFjZH1|rlCm>${!b&EeMR~RB5~7+A64Fe!HoDg0W-_Lh zz}-^32P=1WEzZ?xb>D1)LTAZQZ{1jHk1es+)SDG*}gv zc^acu2_K^Z-F+Q&Py#%cHje*%`g}*Fa3Yo@Rv_*)l$)BQG)UiS^RLI4!+4z$P4vr`{F>$4`fJqx^;ov zUWsUKL6nRb!C+mE&P{Qj{*GHELm6chvK+7D4c2oJ8nx|9j*=iMc?5lQzj1-fF zA6z{40$~)rUFM8}(|)epK_d!7Q?7F*5}{BOD)nnR9?cyj{yQ?q;Z`&G-Ij82Yq z-JJL~8!5kqa`##|;9nmqmHobV@*;Uvc70UN`FUg+1f|k~ILqz}=%-{{5MOcw%UWMw zUJvTU}{A|M{9Ve?pZtUrb>t7c#1RX2ok>WWjyc(%Rm5rbJhtnZ` zc*VEPDm{<<_2fd!S^Q1Tq!UIY5?BoQ=K#KrrTCQaR_@1pRAk1GvMouD*;TCyUZn#q zNw?Ma-(h1lNE9U#;cikmF(AC}62ZaAD9EkQ_d>K5{ixPHZt8y0CHyU9Nd&#}L<6_j zg>ku#OI>=Qx~9hP`_`;;1FJ}zLHAr%XXH!O1-~quv}?=;=&*_1XP?RK_@A$0$$Am2 zbI7M0wmQa3D(TEbEZo+O_l&Ch@64&2>{x~^bM6in2P4>XuD#u>x9w3k;$1r1Qr#$G zj&3OMC`po*l29HpT|4?OiP;lo_;$-GxUtm#MP7MRsdcEIRBgA{J!mlMVh- zogX|B z)A-Zf5k58-%e~oqc1ZJZWUXgc69`HzAZXT6f4i%1|ZO_o^^ zd-ruPQ(Ua`r$J%2Z39+Q1K!8ZPXo7bbyQWoLD+h!(LzaBbbd4E;`1XSK+vIS9M~w_ zzFtA$q#`X9>6z@URuOZiI>t4Qp*x%xE#GM}!<1CiC`3D48r!K6=UT5(YrOmG8?kiV zMba3P?`u5go@q8>dc8qhC2!4uK?G1I@)Elq3tRidnY4RQUq2kkeTftBLlA_Ybqfoo 
zOnQ=PY{=R@G>yI9P@ktV>|_|K3iVR*$ZU%+mS`8x+{W)#N*5bvm@o2lXqk42^X2_5 zI=`Lr>5>lz;zS-Ey$^h&$PvfiH$Q(eCAfd%&%6HD46Qg?!4i>6Co|X||E#iGB=_}K zJQ-h1*!!s}a`Q`mG(O;Qn4+2e{|F;7i!%JDryu}pKs9F(BDXHw_mJ+bAR_<%Mj~(5P5_Ir{G&9 zx>R4!_%&r(+VP&ztu7RUZO{uAw$?;UFZcyD+}HDh6!o_$Bx;O`4iT!jY`aY48JG)` zCP*7~D6!4-jx^ti+mjg{1QNv_vjk%(2Ne^MqIZ>1^@%YX5lO!19>hjjWnY%7Nl8V?lG;h{NS zpX;828`^OT*)rO_O>jntrk4vLN;L=8T!A0LgzuAjA-Ekc@=$uZfxTv?EyighIB*)o zub#FQ!8_{6V|Y@D)i9^DoPsyM&?ne7edFG1)p_2Jr#{zi@hY>8_Y9h$G)edNItUAP zeV1Sz8LCI78qTj2J*}=JqMLif7iZ<~pLoBMgRbO*PgW<=<{wGii!xTv77B8-fpkQO(_!(iqnwz`{)jb}%^w zQ|)qx!(Tl-unc6UhP+y*du<8jL=OEUT3_cez55cu;A5bo zo=AivYUy?LCG+?5<w*Qd1ukN1 zefe%1sDUmRxa9ceVT5RGA*8HrxjkxAU1@voV$fCceWrz(B{o-i7*M_)Y4yr0uA`7c z-n-+=^M1th$#b7BL2JA&d$)SbEZjh+u0ngXj=89kQEqUKPjnfxI%6v{H4_1M#Fr($hmq##?~l6qH+w9{BMk3 zcQFXLmHD&J@l}cy%%bln0$xB^v}8;Da`Q+yZF8x#EYKq5yN*X=OTFy}dElu6OEtUt z;?#lB^F)*TjscUZT7smbaee`*a|3+(n>R*Ral$d3H~2SC>m}CJ+DPT<3j37Bk0#iP z49WQL7lHqsln4=FP@1UY|8V0bc-av!ollg(NbH^&1)25y5At?UoE;&zU2-2y@gGAG zb1wgL1#$gf0nr7s)4DV$kBLZ^0rjEjv9_c4`42JD#6*jYiavLrIA`G|x)PiU9P)cT zA;w{7ITVerui({zdrG9;WuP^9$KpY0rKG@EX-589tCo?^BCaR^N|&&z%vd zBmO|yxTQ2R;Yw9hU-vHPpRe)rhYxJen7C%&_gGBybS~b7^Li(nE{f!+q>Y9AW5Gn< zuE|8m7R?|W`{0D0UUr@L?A!0F%~7}bge^WYC{1OX>G*ub;W+vpE(AliB4q2uluq_I z_@(@3dJL~{9}{xs%qc4ygs7pGpl3|6N#B^A*7zlO>`5i>ogG@_Ni7gQbXlW0k+5+4 zUE)*M#MFCNE`_MtuXIb3%TLIrc2$-7Zwf^v)GNGo{qg4!X6K08MFvPWfw3ej;W@0Q ztwh8|8K*k%-Lb9x_KjcD({%S}=+*yPcyZ$lzCOD$7R~@F?;$>n_iwlv*9nVXizn`n zAMl&r`D{0xjI1dstf?aX(%6D_uhM2IqXS@M2)XB{zeNfAJ-Fz6Wi(AKY zyJ$aGU7DB5}ebI(XK5}0nq?zUlT*n9IqwWnd?D_F1i zlj?RsFNZeJ*abqn)u2z!5k#p7)a!|LURQbGyxFtrPIKM2)F(1Ra>=KhG}xLp4zJy5 z9n^97SGr43BN{8T4IRv6>0vrbgEgw~Ebp(;l0PbuE-`Huf6$%IHJr~2zMquOU-)^q zIPv%@>qVd9&8p)nfP-{yb^@**F&uO3zmueAm~7g4gk03Ttg>1MXCy;}JZ&`4+WZH5L!&))piZx9l4(z9Y#p{?%lV z9_P4RTM>K6VF_mg{PD0`aFT`pgs9%&Y{Vt{ zZxd{|O+4dFq)4irQa;?((<=Of-!>UzYCggs5dBf*l8QXwkMkCY4p0VLlSX{3qJv~n zX^W?-7SwEnuvx;1_NZmyz2B)Sr_{!dF?KDsb9D0}XhrahVX+W=1$92+1 
zqGZY5TU4F58#RNf9<+nWg{&dSA$T=&{!HhAtCn7N+*^VeVF7PM% ze(n@he8sDD>9|~Fyv%u@oEaUQN`}bq*Y+GwTQlZLi&KP2Upr$c1sM}=@zbpEtKGr{%A zVA`Sf%=+JqN3@x?qJt09MPAFvrQ&To9~8(le3XCJNlvu32%8Lt>vx`s7D^1#3|?>} z3JAG3MA0?_THJ#WvoOVNGQ-}z=i@(T-$R;Nj?;u@KHa_&nk?9APe_{;d+^h-%}7ow z*H)|#?OZa3hZWiD;><&@wSHsb1!_zA{SG=&%TGYxztAOT|4c|%Y;szt@SUk*@}6;@ zl_+iP&zZ;fXBAS0_@$N~VX?{V*m<%3FR2InN&bSZD?>3Vj^-XToab|i!qei47#A~? z3UsO$%wV?3yH|iEu1~;!5jmgubt1=dt5eRe<9j8>r?IR;Nu<&www!TJ(Y_ zJm@|8z!HeTHJc_y^-mz})%s?tNUJuWpP#52t73*9&P0D-~TQ8{fRy%G9QgXCD6pJ?*Gyb{>1;=t}#&bfrS}rjQ#91 zKPUVKyoh1hrGLu*(j|Nl!q+qt%1>}M>D>7aiftc%rS#Vl31YVIozHve@JQDRG0Y7K ziDnc()cX??d@aBIcfAt=>eKahL(nrq-~JU{@NoDF?K;q5)ShSiO(h#n1cEmP8#iAT zq5aLhI+U}HT)?JW3n(~RN;4x#K}BQ|)^~~&N4C$i$1vIqs@h=Mt0>dJ-0Rli)ID6s z-UJh?b4mn$3y!5s%vl4G&>WE zeHC@+k=Z6^FC8y2eXG&kI2iR%`#?Mi;VzdT8kAesC~jAMlj7Q7Q#T45;)delo3 z;hPZ6Zf24PBjR#Zjn}xmT-iUQZN#tzbOsY#@3RL)rM?q@-(f9Ml1-^ZVnu!_Gk~8$ zKE*mP-k4hu$(y~=f%(I2%GTZ4Vv{vEEK(B83%)?qbt0>tks$g29j@|c^ zrs@-R9sXdarsUKEPQGiF3@;~X04!3lQ~v=+%d_h5)HL}V{V6K?lT)bd?u<^x8B49+ zG%4&x0SieQ0u~w!=KnP2lT!$e&+2WB-}OQ&Rp}Z0VM&>gY#%U*Bk+Kzk+DJ|>Y5|T z8f~-{+;@L;fbNv&dA>q94EXKkUVxmviESxARk zP0=f>e!I;+4?N+fgJYDMoYKI|XK8wWjH9G^>6U&lB8y>^8sJ0o=8I;6IQ2>b0PkKqN?taIvdsZ;^W=4bRuQwFrq0xhO>)Ldd5F5^kL(bsiVHJazTwMWMl9+2PTOqrWePrVB5T z2Ikb}UuRX@hTCX$B0a#ad+}pY#{k3b1TU*sp`G)#YH@#+e`Sw8i7BZ@mC>HM`)xay zmvxGVl*b>_31-ZuG5^O`)5&>xs50MKh0N0mcKcUQ*|wwf;lN>_B|4w=s1vZ?cJyoG zyy%cq^Rls}GOHDfoFQ>T#x2Fq%Acj~xJ{66{^hJFOO13{V;-pEMp*Ho0dt)oPt`Co zf&FD3a4Lr>__PA$sE;=oAAo(}JCgW6*y+p*rr3+5A`)$sl|r|jtp4-AM8=ok2MgnW z_;wTdKri?|pqoVc|8Ku+ve{tD4+iWJK8sGwehY`#b)ye(CC9bmF?mePae4hfyZ47- zHSPa~a)>Qk;DK};)kIUtov)ZL{-X(H!PirmdhC%|(Zm3t^nu6k7sCkudltX#bJK65 zVZ;y1yENWjE@$B1eRXV%0O98QujAccg3mX|`W%v&zRV36d{kCnmBQFOdRnV!6KuC8 ziTe}{kYzguTn2{ARK_cL+wUw<6bGFr-2zLkc%M5p#7gXWod zWTDjV$eWI@`^whP#4u&*61s7zzZ3Szl1MZ@)^em}%+E$qnZl)u!Rv#&<2R#>W{=wz zZ$%P#NTs|SE~`JHOU=F2SmC_zW*b&tsMBfVvHhHn0M0_&A)Lg7H&CBbE*@}*VV>&V zw?DgA$DCq<@k#{3vwzFsfiD)xUrTLnvAnmE2|Y_s@ST-FmL3e`!UuXq<@f2Rvck>N=q0kVBEm0T^jY2w 
zK$_u*S^H#OHXR*~nQ7j%UH9mZC!cy|%z8_S)=CzRIa4q0tEDE@yA?1*{JjChH}Gdu z9>=MYk4NHdR-Q*;dP$2cljIzG<*-mi)5g%BBY!hd1WrYeD(Xd?M}hK@ce?d&1}8I$ zK`&6zHBZpn^m3Nr6siTh(ir3|9fnqZrFk_(-vuBC` z@8G*TRNAm8(aMjn$BqBEP|f-Uq$&a8Zv}+;z6l%G2C-yY@kN>TD8m>Tikea3dE{w# za9<$IA&XZN5;z)R4U#T=aBZIn#s>vcN9A|kPna-wyd`UCll|FZww0@$uCcsLhFj>~Ugt1l z3cnL`@r7m5+~xnZvF;wZ+u)oTXgT~43nZDyk6-&Aqk&E4d9nTP=(#2a_F1?{-~`RR zakS9P-pgRSfo8C{odn-I15iW?Uo2DgT3$0S{5_F($JobcC4i^eb1E5QyfjI2OPZrc z)gNiiiy*d*D{oglV;D*e=nNMAL)CkYB6XI*lY_<1=)1K{fRwH&+|#2q#Y3u-ioWg9 zB#?6WdXNUo+r5qBLgpJ#1|935`pdqFElPEA?w2aeM#tiWh^r>-`zRYx2I_b#U6hTD z<2riJPkcj?UeqLqxYXE@{xt&6**=8H>%GMi`eVw`{q3>72b2{LS} zx@)qo%_1!HL>_y?xLy3!Y)_)LushAJ$AL#enfZ*`SWm=*3H(b^c3i^>LHmt5Rg!I> zZI4_8Mc zdYaxI;8|LF&?$Sa&O0lP=MJEJr64b(a&hB`nt6Ai{m*o3TQ#>pXRyhS$emQh#B;v} za6qjE9|&RxP?(u6UYlznn?`|V9Z<4ueITBOC@id(VZp}4zK+}&Z*@87-I|Loa|-MgHe%*kXX^V%bC6khT2z@-lP zk>AtYiu4xjnj^#Z)gjF68t@ks1O!Q;?-n%hTl1)J2QvUlo6S54KJ3U>dxu0JvEfNy z0VOk+^Vh?M*%ZKWw7;3@y=CADCh5zAuAN?scx1~lmW4;gjiHy`PB%efj_(}9GfZOLx3D&JWj8#PR= z`<+u+lXv4`TdgmIq+!%ffGznYI(y=WWTMd*m&Cl%s3x*5vAT)7RFY2pFgeXZ7<$?GJvErTYW~pMH|O4@)=rOcSBp z?(U6gxgkXa-TFn7X$*4-SdI{^RGqYz>%Ha9AeKd=uazU)?$sK*kt_jW9Qb}rCsJu} z{$oZSw2~|0wzbaLp)D)I%~E-E2x1RwcB#ctJxtn#&g5*uAt2kvIHjoIv)H%X)Ha)G zV2cVHZ?kt;$4Wp)QL(+)WMyF(neeJeTYQg_i(noeHQ1CDD|r_d$+fbO@WgiS*8ZM> znpkvtc9>WJ5R3m{H%Qlia5RNPENwXSL>9+->QmI5t2L7oe=CDnYov%lZ29^f8|pTo zB*rhsR*v7LZXjYO`QfzZB5>(!72%C>ni|M8#ODF?Kz!VR6DT1W`$1W|VH`3&%}hQ8 z%v*o_fVhD6aDJ~0ova9Y$dVJET`Rbe`sRTgS`h-C9539czgY@wYuAvSrh<3s6mD00N@phK?k$H@;k8sOz3Q`+ZgR&$tP=dN{s~86D?2u!1Jq zBNrh>+gq_sa#RM!1Z-ttQ>GIWy+nE*(Zh5}5C!47x?;xfcQ&0IP8IVM#6`tKgsXR} zG`9Gv{s$PeMxEBW1dHRvn|Fi~;uqmKrnl0s1!3$=5~5s3M?vdfB*B&?WQWuWrRw!b8 zzZW#_^SN?G=42x}{gg}r&lP54fIN0cG7y_Z+TsUHjM`{>TpT;=D0$VAZ zR7xqK%=oc?3_9g14wK!;9Y)2a$_d%;w#O%|JH>{~l+LxO$@&HdyRyxu%-)an`$G69 z=?n10+-0Oeq0v8dkQL`suK3Z@7*WD+8hNT-3G3yFYRE#y*wr?wH9gq!7FGzQ@E&;E zCPE~~WgZZysSNTmbr8_$r@9&_uYd^1BFDrkyxHn_eUC)Syk7z$1hfPYKoKN7*bh|} 
zS<$v^Tgq`e>eqbSqg_odC<;Zc>6AWKu_Tqvq6&#Jk>!oHML1}^D1BOb#jAf|Wq!u` zKehU`DYA|s>D#4b6vy$Ay!oaLJ+ewF#==!exY zyd3+|oBcOiX{qYKQ27syDZE5|Z0<6>o&)wcmYQ;EL8wsZC~u#)TR~7wNCt(IRXWKp z{%Zm(kIuq$onJEvk{sa)Pxcg&`5QcgE7h8ctdrrBYYzJBu+P;fD#!u?WN`p=5pSh# zGzBUTRrGC@D3Mv^^C$GcmqZ-Sa<^G*O5{)xW)WRaz*{3fEtiU$H~LuH;?vgF&s7GN z3>9YGfmq!rz3?dwB46;)A%vzNGTh?k)d=e$4J0U zFtVE}KNWH`Dv6Th1`P4wL1#n}Phh-{*by7Fr72yW|9US9&5f>!X19Bp+Y==)i1M|d z#Bf8M{e`(=Xb1wnh0^>n%xUK7YX8Z~`|Ue{$u6j=-P)w&Mb^w|7N>;5o@u*8cnZmr zll}Eu#zY8$m8I0go;E2Z+k)@!{%7-I%(EEMfJiZ*?cQ)Ncm7M>np#5wfu?h7%GrCR zngkh~mS@@H8ByY0e(SKy1!SVOy=H~2%KiCgJ`!A)7DtJl6OEB&x2V2IrkGB{HewrP zcs3asV04gJ@!F)Vl0h%E)25KPhFFOsjW?`-BWR|c8v#_W^SUYRtXfIK`X}jvM3ST2 z9L13~rjq%o5+U?KTW+lXTC_XGg2^u|tdEgJ*1f+tUoSry;;BVPwZ!bwux^;w-D>Xt zc{CI!?}TP-_Q3q2XM?vQTY+h3R>*RsC5}Jd25lsB;@*i4;;Io}hZomAxESxd>_cvf zKm5*c_$(%R5#t^#%=PPo?`*_aD*(^oxx?iQ1EB^!4v zdJiR|?Zq;q#6pKWPZqZ0Rj)mmF4WKTBhp^zs0{lKO*rQTQO< zg0vdJFSROmE(bF4S2yQFA)Z)+<$sBxuaDktmzYZrojm0KG|kA9!D}3vIeYLaFALdX zJYAJ94`RPKYw7d-{nWG2n zgQdYs4T@D-dngZ&g;7uM^rUI-eaplXSGi&jBKN;Go6MG-*zG5l8>3{I_1l^f7B>fy z%JUOKL3=ivoTl%a;_BYd$XeEhX;ixDS3{%zOb}fwfRltIp_IzsAP9_9>xax4u!t^b z)%Cz9YLB_~Q7blCx3Rtr)qx^Ha$c){+m-4;0z6015c-0~$bB9I5sZ4T)w4i?+{Z2s z%%`?|LiAZfYs7Y=#Mt-!CBt&BZ9*;h`^s*RAQkF$@zbdmbGu&f;(@Fo2Mnv&THTmi zaXcbKv0(uN8?h?23g_*i6IoD~OL=$mlTV_s52Pb!IcJfI_A|g4=7mcMSZU+C29j}M zpFxpr5L}F(JaQ@R_+!Dt%UD5TU+HA-cJk_5$-tTGgj}7hoKKgt=v`QK3LF%2gy(Ny zF@6{wVjkuWD>IwY!<)$-t~Pfz2kP&9??5@1xRO|e5#te@;up%>R#a#FO)R6LJumDV zP_3l}?aRJ2)4lt;7WI4|mBB5jRn1`en3Yzb4*m<_y-B(wDA0p~kOvE)Cr}1y`dm?Y0;s__;6k1{neoN0Y_QeC zhue&!I7rzf(Rd-nGcC|OO4IzzGg^&ElCJ9>DUXm0*}VVCPTorI+Fe29ad~AF08L!& z&7|g6X&>eDwOF(t&?R(z3q#M-85UIkd@rD4_LE=1p?%PM2|*N@xXR&4F6Q5Wn}M32 z=5^tM@jE+nr5;ZLmB!t>ve4Vg)5v#fq?hgijUN++3um8^aEKO7-fZG2?wTjuaFSQA zHm@|E)M|Y?!+ITtkR75MU2OIar%92bx4*S(x-!T=&OYib z;(e7BH*#Nl7tsXlZWg_``A*3p^^J9a4xJt$tQ*89T5Y2>efDT*q=<*!mi)qtUbiSL z;etp9EQxxIy(Jh|EEEfZN&dEw@}?Q&%d9sOsFHB;-xlS?k|5GI 
zjyzBNLHLCb@7rx<9L#vg|3j)GiRZXU!=zE?s&)P^GN#@IZ@8O%`qXC8Y5zG1rAlSJ7b8qj&0tJF zNVi#{o?~E{Mxxv_UFPk043DjJWk5iWpk7r|I34Een2!+}QEo$*2gT(!hXiCRx4rI3 zC>?7gbGgQ6ntIwRsrH|noAR@Hp-ncO5AI~nGm}1|0PH;Oi}$)*<0sYj$$61)`pA=j zw~c|JuQ#Q!|D3eMmPpl^!tn%OTF2KEqlQLO#Ivl2@Az@U9wkCD5A~>WHKL+odVgv0 z9Itb^SWc;qMouyb5;k$=h9qeD(is%j=5YS?THoV)tw9=~huRv;0lO+)_Pj(RvV?Yu zZfmmEa=An+W%c`ppYqLV_n|{$CarAKUfV&`n@ygEatWdZPrE&%GG*af&nQDx+MA}4m$>;!3e)goaxv|4czaET!s`KvO+|Bv5Axh z{elwgA#52XNNF+8F;S8gZI}-8su;$_Lypu`08(y{MeU+u8SMOOSE=o>;mOrf&xzLb z;%7;o!~DpAAxpE5JFD&6@&)(}or)`$YKAd3b(fp>w$i%Du+>c{m1R}JwHPH^`D<&VvvMHB1%sb z$_CzhiX4hCt0y7#0W0~}k`=N+Z}HM18=g6FJz~!84X0-@{!wLK{M}6u@04f{Q-zl& zuIPbf0aLLNQt3YqR%W0Dyu_e$FF`7JSjtL*6#mDg@sA?a|7i-H3s3eWhtM74>_mvj z@vzzPk@Yzj2mIJjAY6i+&i>Ek@mVCksOleOXM*|L<=;=F!Ac{n7BU&1Q;6Z^0!~OG z$N!`+ei0A*M|yy(mj6$DMqv~N#z7f&JBoR3_lc1aB zPFeHc+vx}c-+m)J-1`6+JXyv_qvYbOz0d!=T5;TcPG{v>3OU)R2M9{I7?boO(2&iX zK14hTle`{j5spm@b9`>h#M-FumeL49%o65Cfuc_x->mC$AMg;83$5ZRydZV(YdxN+ zAspJEe&&ud3oMyK|JdorjdIq7Km+aR`4yA*hBv62mipL7TJdYdk~ioLV)}tN8`VAN zOW)g6Uda#)l%~5`crT^dLimX|wdp9AC6Pb7B>YOUEH4A(dW1!b`QG(H^4jdxPlGQm ze0fdosU$qehAF70r#i@_B1DcY3?W*S!sFSSx@r$&7O!-T$-Ca{@aIGq!FgN8d^{Ge z1iEp6G{er3o2T1NU&4P}JN^(A>M>6vSY_5_4glRK(1Qdtv0UwYJWit{@EUF+;5^KL z$Odb+m3?(n`7t40Mc#&hc63nWBeWF?a)gz*ZVlVjEs)>a~@2 z&)6@y29Oj;cSM%lB3f$0Z)Y1%$Zy7Dy-IJ|$Q10?|0s>QEU&+52MAbR<;JBtY$^;i zC)U((5(aJPome9lW)OGY5js8#RHB^jw4uAAvzB4xDdm@wL3my+L}dtISKp>-)N~-c zFP{f&9}ii$NH+#-Cl?XK8;qqtb6voh`skL^MvD6HHf7l@J+3@xF+aFmNWRzqRaCB^ z3bxo|(bW;txj$P|Ahb#QHob+H&lhBu7w4CCzV3~wH*iLtu^l(f2SKWCMk5jB3NK?O zQ71t5o4GsduqBKt849vQn*ZkUfJO-oSc>o+xKCv7Cdt~`4mvxdS#fy976Eg9a_>%< z4(1G8x~V`3ia&_H$o*1--e`BboEJ#3-fZNrPolX;5dB#BjFp$6puJ=B7)omxvd8{T*sixAwtVnlC0q{rBC)B z*MK@MF+I^9l_XByhTk6(16~vV`!wAhk5;vZEd$#K--rK-O?--1&;Ow`{4M`)=D9mz zAN3iT?!&acqH|bJlQm?V1|Ee-_V!uwStwmc`GJjPJVY~>%hnQEj|0}niLh{>dGu_K zwakdH9v;pQy?K>3xQ%3}(O5>3BM;*`RX(9OW#qx^R=x52UW4yk`XgX7WvKwM5NR9W zTlR_52qAZ^*bNt$d%0+F_Ddv5A9Jazw;V2S 
z!we6hX5+b$Zcd&7->F9r5t6IYz4U-?NN{I~>~v7=hid0>z3%D-^Xtvd8~!+z5-)9*J)(+KN1E)6L%l^jY7+_j&BUlf6evjVfVlQUYj)nFFA37IL?lh*6 zhV*g6;QG+52!ecFlGJQlZx6NBE%*&MPuY~C5kbWI%g9=spUZwGwnt5~P`Xx~^3?S5 zjuJ8Y_Nn(3RpB`>x8`Sj<$CqbYvDx4wGr0LiAe{JAD_+E=1BVf{(k=~j&VmziE}#l zkaYt(GH0v%0%nzUXr-ID^>NK3Cun;BzN)1Bm@vQ5{Zx4nNg{QJi%5_Dg15H(0P2k)oI zxEvRjv`JW0vd3Yi^=z3$-PhpeC(*!{aGZkg9b!&Mr@-&8O>@Cc{t~f|n}J29Eg^+` z;k8U8N@sn%ElVA9Ml|nMd*bCNgM9zstj4oAQf)nR?HfyVi@GWkSn~f^$7g%Kq&o=A zQs9eNoi{Pp`gvfjWO_kgG|{)ch0xlXbmjKg?juXExVi}#^jLDyMEWglHsp;s%f*BD z7q@6kI(4ElB{=b4y3dWXxI88G9sH0S?mYj-cW$tYW#mYIJ{6NnMboAAbJED~+c)XZ zJHtf);rvKat$TEgDnZuidV&CYA9l6q==Z3WHcTpZ3FGh3e(s44O9PX;4bucFMe*Ul z&0c``xz?+nWUW1pENtLe1`Mc=zAXC%l}K(biXaz>!}?XD`&Ww&^3mG_@h=HU-nkx- zI5{@`vmPtm4UZUzb?I&q?fvLUZd>k?$mKDgUBK<;Lo~k6kwMvJyqnypFo;llOfEG2 zNc^`lNWMFxi`Qi3m)TeEb6tw=njU6}1`Cd*`(7ye-V){(+SDc-443)JVg{u zCs?yum=$$Z4sCHq2LUW(_EK(%4qCCX8@1xsBXIcBovIBrB#Nnm*yhnlv`h5>jK-JO zG4o20r*!xGYkVKAI%Dd{%R3ST7PjkNp{V>2@Yw5O`xHV=4a#bbK2S5ShxwGvb zGLQ!n5qw%KF43giq{AGoCPLP?D8qlg89}F{w@Tfe;e-b>5$0kuJM$hX{4g7$ za4f+D?_{!~f{}>T_OhH<12L7S3UzKEiA_g}wb{qt@>{kRskNivkW$XT$BpV&fzO{_ z8OAk73ohlXL+HR+MUB@L{>TqEw!4xd(TPahIm~)rfxR3Hc(5qzN5S+XSMi=exws9+ zpQQvtlun@t@0r<6&%_*>Z;+`C=u9kxdL)F>(xi&t_g0#o(?Zkx%7{Vq#qZX-L&>!; zS*piIcoESA`*_tXegxz37tbSNV!lZa#B8R^Es;~OJfjF)mbeoiMI%HdA+khigTf^( ze<^ZjW=fTmb1f=$@ZyR`6r#^!)~uq{yJpNO)wB_3)#|gif3U<@d)q+6jZw8)8{~V9 zT)>?obK2V*n|qF^F1Ca34}Q!cUQLSzLKQk^E8}QOQVs-)$E&*Q03p%FG`Too%2#Gd zVA{|*Y(WMGZ_H<3EC9E1$!Za;7Ll7EckJfnhq?oPhtieoym&kvbZ|J_Te1$0me1fp z43y{N=wVAI8(;AKJj$_u!c!vnn+3itVP1zVmLVw`S7kLI3GYwZgtNM=Z%LS3Mpr(7 zOarrg*>mER!*Y>DcVl%-B_J#c z$>O~2)R6m4wuZ4tEo`Cm3>(zAs10_ZuV?iKbg9OH6K=;NxftEIJt~N@%yMV-gP=Ek zgkm1+*Yy(OleQZzDSc|-jA#e8+u_ha4!u5Ky;(|P5+#r-r_&EhC^b)~{6rNX5c9(~h<={?#daHdfM}l}Nc4KQs+HBk@u!s3 z+liVtDAmseA7q=N6-3;XV0?<4{wJTkRLEy;t66<(aHb!$KZ%GD!j%bse)s+c>lO>< zxNf`zk}N|G?i){!!**5{0mf?Vy?NkcSl`~>T*YNFH_$4OYU)aux$T3&tn0?n33z2; zO4C>T=-cM)UU^JbW~t~h!1w@9(w%Cj;6@l|Li_IVpCT-v4?wfx^`91IB$WyX=i2^D 
zQtSUs4CaQF?;Ic1>A+?%h_x%`Kl0%JAIX&nB@ijmz`mo1y`Q@wo{qcZpPJ$?rh4HB z#$012aoBV&oxoz(Ssqzt0Nt0y5mzz%e_!?Jww~VA?5=!#7N>= z^QwyX-{#!CkM+1pHeFwRraF%)n&`cz5AlZU5TbsmArV1hq8`N+rWkSWCR-&%ZAujthjHh9*r%h(_Xs!e!zr5y_Erp z0(W-(rF@bQ5s<#epVho_pV;EE*+*(deL!Wc9TLQ)Z{fZB;;7@3_RN+kN14R7WJ*o+ z&WY$uP)WH2N03v^eatN5q;JyneEDE|?vUwPBX|1e>;`4n7%fUNbf-I}%-}*1-s& zG6DvMn4?pW(l!!iV~O|PJIY__uO>gsYh?c2HmzNX$oM7Y5*9ok%3<|!=6LdK0Z90a zG+?WMX(OXUOITSx=8|6Xga2K0a0#7c)$=>a1_8u7My74Bv*onJ73FcW``#|0=!W&xW40l64!5o{nnX2ZvSXz5TvUU;dy^LBY_;nS{_sy%rL+1f!G>a>8l4VJ%z!rVUj_tVLLS;+xt zdPN8hR<1w~XxZ7{YRIl{R8mVY<@ z&IHv1L&st+L}u>3eIJ&;XM)|oncX^d+0e#zd^SFK(lf@$w{j~FD^GTjmKlGW3Q*%WO^{LgKblQsYo-!Jf<(w|B3Xk2zshn?DtfJoz zT1b3c7zm^Z!iM^bx~_+rNe*?^J+*SVZuUdq+NgPjtC%T($cm)-S-$<5fY z+=%?M_0W56VV4W`86A<=)CC=rI&;sLJ`Sl_hxu zoRf7!rtvRSU&eu?*qHj9ZGy;mj467N!+^e!*ew^twj2E$v>DGo+U7>@dLAC~UN3sg z7iiR-TyD-2N>DgnO#Dj^6YQnOh#&blQv+#|Uixysyzsk3qy!xU!P`{O>3sN&{|C(*65N}yR_Og8<{meAcgRzFJobrfpwqRTs(~1B! z^m+{;-V%k?U={@iPK}^k#!9YHr5v_)tjhmowbVbELBzXW6$OJ257{NlX9_+%{Sfjb zTRhynkt<}Z{*{ZZ(WvCMwrU~X(-c`9y))akI!$%{FV`xf{qnE!yeUY+QcT0*tT)rt zsGqU3j`UV^H|M91XGOIC%c!RZ2mAxX+})8lmpnZP5H*8KHn9k-$vkJ+ib<$@cZv*^ z3N@7T8p_cSYSV&-r0_+$zI@VGiYy+xP_|qe$;HhaPXHGo@*baG9_#jYTrV(%Sd2O{4P-RgY zjt1w6HT5g>N+G(Tu<89al%Fqu)6T!#v4(EeHO2BaZhvl-?H62Aez%*gb26>&f$8lL zh64t(1->-nEE=|&N`49PaC&fAEemWlw91K8_7ncz@EZ{MV0y7$RJof9PcIvC*|E{> z+wtK7@OTOgBg10#w)su{6H+=?Jstt`etgKxZR{qa3R;1{bZtn)=3FPRYmHmdp!<>G zUsdn}@xpF759j_y6zF^3QA*OW3Mxmd487N=dX9|dd{p^Ki0JWvI&N>&ARJBUgnW8x z7wErZNeE{nj@78aLb)H*jVm`2bRWGx{SL?G{^0dZ3qLd|xDC}^{G~9?SAi{!l;64B znebp`WVEUTfJtPIoR>JKK^=IOj{|All10Hf{8{FKS%9Z%Y04DGQ|$((7R*Z=v}Vwu z(y6|v3a@d|z$CuYCR$m8cVZXoYLyQOU621tEDf19SN%7{n6=Ii0_L&Vx{pT(bp1t% zhj{zfObTx|Fd@S)`1_Iqt1@C(aZ0sx=v}4A83S&;9}rrXKW)|uMVwy6VD1*VWP$~e z9XM!A3fGzsx>>+%#ZDh_d0XY47Z!lML(n{vACQ6D9v3VR@nf^^;U_N(5Yd|=%`?O> zRw?p>w|bmx9ApgR&fQvOfJR;@DOcyIG=rnz%$~Kj*oD`jis!kf-B_&9>yQfY-{(Ziq&*1X_uSWXc zr!mA~{x>28fZHxM?CBEWrR+=p`=~w#Spgo$Z+k{9sU1pYFM`w-MF5g_<73$yD}HZJ 
z2k^|?3&7V^A!Zf9?vuY>)H6b;MxK9HRS+%;81XNB(fG9?HKmZM@KKd7-l6xRtZnBX z&J4l*Avqo7<%3d{XCf`yw(<*Ai}}ZH)Q~+n4BL($7kz@)K0RBp-K{cQ-2_v2n3sn^ zzc&azu1$@5T9U^zde_}Cwa#SXFA*6!>`3Z|Q$iy7F8U6H`AMNz&c>dXB(prP@fc&S z|L+Z!R)wN$)^kAoiVv_E%tlr&RRgB<<7eReXwae$E5s(B9RZu83{qs| zSX>2DwR(7#+D72$)6+NEM+dipBpxq0hmY8CMu+RF-JqTe7`uD1W9{DW3HbEYlrwN7 zgJACj8XGh^1upV{Pm?hO<#Ty-skX9qpI#Q!96qpPt-Oi*BUVPj3i!cmEj8nIn8N9N zF}jP&v0BiTWkJ9GHN2FF)aT!2VN!PGt*`!1!n2C794MzXzKGN<`+)`mut>bQRWAvcoJ%ZVa*w2v_+ugU(0Uw41AZKKm2 zectWT2FYj4s%qaxl*xc^XwL)~jV5IBRkGbV+qKlFrioy3ZZOU4nEJH0BTVD)(s{3y z*dAnRO4KWUTke~u8^1tob_alWH;FOH)+9A?{Y!G5L07( zS@|H0q=+AHgzR?T%r*JEM{48%Tj=}_%gv5fRs3=iz#0N3_xPH`@wgU;duc#@?c?R@ zVYyq!A|ayGdmdn9W)FJ_;0g)->CT`L8~~~gPLRbMDt|R|teueTv~gXQmnt#WBgb`3 zAHh!miP~t^2XM6UM9YVAVv_cIQcg;Gv)7_5d4G5ly6`kN(~qo2+Ra^Ab=s$Z)u+$V zvCfx!D~Aj#DH7|I$c;^VEB~=Jt`uaovMnC>pv5+SGd~I`3!D%Ziug7+*J!Gk=iNN@ zvC6G6jtt%3<#$L*<8K6i!;I}i7U$h}7Z1w7X z>acYa>fyRMx^-1&ppBNk~lomWHRJ!*v1={gf5As$#y;`FwTC<0Tz{gQpY54RE zX2@Obgi~u=@2FAKTc-86`Jqn3$8V(BuM zqve9aLKFtg9-Q3Pe-vXJ=4=$c`0LWCCFJCj*$7EiBpM{zL;hAI<(+!>+vJJV(SW8? zL1NH-p|=5LBkUS!aAd&q!iw3~2JXV|=hHs@`y>Sp_vaH=|1$ou^(|@iUJimTBM*Q2=LMJclRqsnQ?k1MYP_*ah=sj`RKo^DdK! 
zNn29iGwkLgd=7A+{CdUU*QRw@il=C^;rnr&0gF3ZYV1tsq@G%JWQwdymBuGhG{@$6 zZE63C{1%(FCE@_%`k9D#ur_{#K~9i~i~UN(lGNVj3DC46-~?te=_(K(r{ zD$(i8w`bl<=dtj`Y(IhvXs-@i=Ps4WYBpo@Ti?CMKAKTiP;*tRk7jkFOHEU=;w_}{ zO~%2y{t7JcUj*aD7SWPpg~REDy{bt|8l+;3;bO!;`_1q3 zX)Da%laq9R<(-iYbz8*J(E9wk01@l)p@^Pj@Qw2ZT&YXB{7}qt(ivI;!H)%>R zBT-_HLfYZYj-DEyuBUJ29a*d=a^=yOI{tOwQJe+n@$Z}V07DYPXNnF_ zUxh$lZV;cA)~u!mwH8l6?v{h01!$ZR&;MFEkcfPRtM)QdK^*`%;!APKNgtVG&}(x3 z8g=v_(eoo=0?*_fu|fh+p7DAZy_);Xtz{#*@JnQiMa?){NGv(A)AT;-j?6vyu3O~T9Bm^9m4J0Mp4@#xlah`MXvWcp5%)8Q>D|`*uY(xvr&yrwx$TR=ZWPH zVV>NzD`Rw55vwE;W6YcTReDvuN+dX2KMwlYE^zVN4Lwz;~4zjH7=6yw7rB9TJ#RhxEItH73zZug##vJK)qUFd!>K4ea=|f?+-SxhZ_TtjT z_6OL2%~5(p+$JGsLC=gPIp!EZvJn^jKO<(=KcFS!)AQKyPA0b z8wTpxcOq$f4QLRUTCy;gL`W#)<9I@&7 zCOh;h#@sb?#mN=t=x=UWH%Uq9NcBUrQHOviiI30fCO@@ibvi5x)XVjXXmBVugwQVz ztw-=agoe`5ILK3D>+W&KIwerX^Tc%Tb3VyWeigCoYZ5RRG5X~7N??Q= z-L~~?xly%y0p-R|ZBFZOnVXrQNMn0>zmc5Z|9}{Q0XuRXW{NZZijNJ1bhS(l5^m~@ z_`WFM?)~)eQ>NfBW85|8)-jVB)+uuALdKk@?z=Jh6Netyc-v&w<71a& z*3$!-lv8_>T+u!)u*+4-trpcRrx(R$Ak!OL^`y%m>@)N(iC&r23O}-s0iL)!2ZNs? z>PFsViy1F+NwjfJ;kS6?gQF~75rYEb!dme!;~>pN30>xd(0gOt;V7M=w{hxe#6&(v zyA!h_#NlTptj2XT7cr{=v7fjsFKb&3Iy(&rW%YE)uoUy|%L5FqmANNZ>VpNKpviWll+(Y#oMuAqDt*lR2BcoQSBEbgbIkoJjQd}cY=?>u%- zoE+`IAwAOZ@3$Gh@hrXV;g~Fe3ePI!3LEzUcu1d~()yl{WTns^HZJCs;Hxk;o4}bQ z@}E;QrSh!gPn~9Aq8Zp$CcJO)XR&*XB5&S5vP*2-e<@ZQGBlU#^%BB??5rAJV898^ zFNn@6I^*|1&RPW)Lng3u`bN_s=7QI&@&ouCs zz<%fJwI;1DrIt&pLASSC**6J24(L63in!ZI7xUtei$cx)`7!#n zT4p{@I?OTiA6^~1|KJe_r_~o^L+30H! 
zRH7X_sI*nDy7&^l-i@Wj!0zoE8oqhiZ#dsbBAdKCL{E@SXdXzli2vZV$3jIV8#J&v zej`v9smT-z#9n0Dj^opi8on-@b(fpJEu|EFjR;Bw%*mm{f^Ia3Myh#*y&4W@oKuHO z^Q9MiP7dav^Lea@4N$uGor2dNhRTRW%K9Aff}1{n>%EsZU6}J6|2)QGFWGfnJ0JZ% zjrS#|O|%OcWgCiBM0)Bwns#m+51qh4lJ^jv~;LlZxN2O0Pj7v@9l4=$g=5hF!WgE_mQ05SSs>wPSwvm|Nv(K+;{q-QQvtSZ= z;pG+s7RiIJ+uP8st_4mNnH_5(QnoyefB}l6X|n)GlcR8KfWIv`4*#3SyV+J@WTQWx zs%)=TwkhuYkn9s7g493Ts$O4)_fRrU2^5V%?qTu@c4fyl&Z&Gaq{@>g6{{{s9zkgu+=HUdJ zVP0vL;8xR~K@YuaWiaRYEz>CR2@7O>VIBRsb$w}$_19F#O=Z;D4`dY2QGb+NIFx9dwCj``Wo8Ex`M)lo*1yA*IRTIT$aW0?HYDR zFfyJkSjt9?DLZTAHDWP$y`7-mfN$W+WxURjBT<uqt@aIbSagvZgOz|>{awKHNf zs;RRKQzkRTNGwR@mM5X)i!3fq<+N1BC61bMVbno5+ifL$O0@~+cXF9r-0Eg$puiVD zB54r1nF{#>v&#eTrEnn*1T~^1jJ*#lFS%^R#gab8NV0rw&{4n!II7cRCmC!gLE0rW z>c%NOPe8W3zER&L=m-@ELA$fhTlabTS27KiM0&sSWFVsU2PNX996^THUvJG`S? zP%Z$(Y>IlmdZ(@`f8`<+Lv4Mn)%0fJC1sd>1+(b2C!7iPH)lDel>yyGaa#x$5-MCP^ z&!{Mg5^J6up0YAI?gS(uR;S%@mR_mmh)(VnEd>!tyerx% zapE=NE|5vnS@+GvAW~HCQHKh#+2F1i7u9lMrEr7k#&@2a!;PvrLQc67Okuh36-h&H z4swxE&PnjTYy3ef_8!e5Lc9uq5Y#+77%o3zaK+GYrwP)|cP(c{<^JgVkZ(miDK=}r zU5Tid&i>nYPhIx1O`K-<04=yf4@UJunZ)uElV8VG+*FUF4DwzxvD-Wo9<@UHh(; zhX|_%U?xM5m8Q@u&Z;o4ug8;OS>U~ug`f{_-~%hbH>(%sRuN!oV@u&pVGkgI?v?Ht z(W~{KpE11XLD`cmOxr_PZPYpqD-kryMAZQ)nx3eb5 z%8+-;h`t`2b*VUVgOx_l2&tg1ZB0`QVj~|2M!Qp&`o#>8AaIza(Vc|TcQkUj{2E%i z*n5J6vzPUn6Xd@x3AMPL;3q$6J?SoeYEwxRuA|0j|9~16KAb?62MJ^(!ln?O7LMp* z(-G9Wo!=o%)0&t!lSD3YrREozDWk+W8@O?jkj)&pR=)WXm_Z%S=w z$uv=;a6)MJ4j}OgpbPn^z`Y+)zFxr+hEh0|WHfjk+0fqS`}zB8)$(7a!C_9TxYF{; zSo{8|ZT#84-gge`M&Ea{SUq7nsskZS9!b=Tahjx6OIUsc3}bGTvYgK?gt1Ji*DE?X zGWHx~Ru(8&75%0|kKeUSJjLi~hr2Rn6~-jT#jvbcg=O*pKcf#?-xglYs5Q849Uwfy zK!H<77n}dqs`{;}p7kZE_IAA<@ z0YdNth@-6fUn!9K>E1hk8}E-}!_v{fEYND$B(+?As0sgEc(N|eBb@Th0PCqJ{EYHC z$UiRZ({-b+Fc;MKy)6EsZ#fv$v3s)NLZnl1oddS9Skb~3Tm$`{9`0b|@7Js)wR9<; zBmA0+&JhRVR}SqGy!6D9UU%_(0xS&+2Kc@w=(SnF(K@_o`y~SM_*OGb`fg-Xd|6p` zbq+z!4`aO9?C^#*T|RU_qej)NZalzRb><}^BJ_5Y*+WHCGPZ?VDElJ&8HB~J%_n2t za?=Rd$f>1I@TXAkElJr!v{#{~mUU?WUoewpWR9a|2amTQ=ib`aWjaF_jpnT0x(aFU 
z2`89aSV$1g8A=?U{14jRDk`on+SWya1P#Fow*(08F2M;-aCdhnxJyA{!Cewu3U@8s zCAho0`>lVseec74J?*@&r&?>)9HWmu`gfaO0$Q$$+1wO{_Hv!lU15^Uk8+z&*zqE| z_+`=hG4pghag}za;j%Odmn-@7 zqr-Vt=T_s-c_=E!noc{npCdhkfViqk!fzAe0!@;A2{xilP)p(PlYx<-$LmyV-KO%( z@ihCSy5J}C5Sl07a*^(jtd%b+0Kq`f&;`*x`p+i%mpw*2Hns?U!>fV8x#=jBsta4H zW?tM7k8U-iKO`7Dul}w!jbyD5$^!jrrx3_yl!1rga{&b(QCLv8UrGR(;HKE_4RS~A zg`D~74``CJj}!HUf$dDE5P;`bdV}}ti~Z&Hef}ES@V(e31^n|8w7u$hWaZA!Mn-y&-89TU%PJ2hrk%u{^DQKfd@g|4TxfD&w`v`6VS~ zmlZa!c9+*&qykexm%a37ZRG&wTZ~>4R2EZuf`xba(uW^&lqNenL$C40 zS@~vezs^@(b`QrIEzv4hMy83`W^On!*D7)wnDbc47De<`ZB*FN+3Rf-f{dPfzwtQf zncsYuG$bA|k*7;v_#h$9MO;)PM}&mQE?D{R?X$M3Kzpc+kMro^`>lzcKy|Z+)~&d6 z@@^ex7NU!EXrv4ue(U@(Z6-mt?%km=AK~)aZ2{j0fkxedF^z~8IR=1Dw_?Sw=qUp} zD$LE)(JEJ)IodL#ttX`2#QxZ(j>H3kW|dr4AM55Yf@>3LgEI0ZX*mn*!#C2+Ds{e%&LZcZD$$4g;LViAf+^ zQ)JQ9UdG-@K~BLd9Ub>fR0%z_dy0VhS4zpROhHr9mdX`{=Z zZ6O=Xqc?9R(07<6KWqh)FCE4cbZxTCr2aoHwz;!gtd zvJML!C2tisV^C=CQ-s}9=A{N#((bBJE%i(P4Uv>GS_GA=nE31P#)O`fG4+S^nk6M$ zBdd$JrM5`bM$#Q9YwuzYVH{&FS^M|{PR|=TP5;0$^_P|9Dw7Utxktn4Qp$( z{MpduB3z_0Mazx!78z=jT)y<&nSuR9!?y00R&*N`!#MA+IqEjq|C~BLZI2oKLt9?K9ME+;@vGQC zfhjs@OP3%7^;*gd*TvzgVaeC#a+O%hSF@B}{8P%ii;HRaf}NChMo@+AM%dt~h*Z`B z+Ju(_j1juC$P+7TTIUNu`1#Fg7r=UAA-jr9x5K-_YDlge1P#|XPA`<6#XlfW&?f{j zeL?RpFa8fC3^v2LV`zdi=?t!a5J-8Htn*!Hn5$eU*Kkk?UTaO_C9HtYefAplnX`S% zw9^{%m}kM-|v|n0Za1Yu6=&87kWl49j#gOR|alq zRK*i~sS9fZcMt(yKmL+tyUOv3%a4=4oYAj?hTgHxFZaBS_jfS6%Hv$Pg$tHus~MUe zRUd5S^4okETD>}Btk3VxcxISC7OV%tkS-0fX`YeKRb_7IUS=!cb2CI&vSdXaKi?nm zOpo*(%|HjW#^UKg9v)6|Z5$%TF_G00UCAKAtuZL1@MKHZu^ z3^Ri_f%2EBvxZ9~x+{YTPjpeYKdbF~li=T`S}8;GF{_`Of|e`MO)QgD!fLaI5VqxX zT{a+K_OFh@?YtaN?GDDLOh+ND&Z)2+!fhP^wR*ndoz_%3=SEX+sMz&NmQ;kSdlWp>a7bM*OB%7XZ80g*`qzhwc*~tCXDa4ok>y zH@kk+DA(9yqWfCyX8|l~vyyXfY{eAcqiNoCm9pinIRPS9W}9yHThV})u^NWH#geVV zz`Hyx4i@CKGzHc0{xQCYMy@2-BUyp}ONH)ZmL1tg9G>{#BB)9$TZLgnTv1Qk_{K3z z@9*=WQN9jbq~6!35yD!Lb1#O;p!>OBdi?k5I3x#af4qc;4|jQs4dL8QIhSU+={gJ< zZvvX`P`ZtO$AJ&ps>xgIKI8$v5pGuxX&0?av 
z^RK6+x0QSEeBAOPukoegNj*$yk7X;-jGPl%llz{Mq{lgrCaYO>g|mUaPJk$0EDL6! z;ZL@9bGo%X3;I&`}e!}&=2rj4Y4)5U^+!6OmZ#<(A@lS>wZ~e~P(6zL|Css%zw*0MQozR=k3@w?f39^x3kaEOIam zlkm;@eqE)(lv=0y6H!f@9`C@1SV;4e;8yRe-qTEuAQBea-WthP=;Q#7cbT6rUt-oA z)M9`4aJ}s1!B{V~+%NT;)yl6lCJ-)dgHqr%+fz>02M&VAWv!XVMcGJdHi0ox-;Vjl zN51eO7N~d;su|v}U~^``jbmVjALF8tVT(!n=+fqtQEO>-sJYm`;(GVJo_t&X)@oUV zPY58o-poVKWHzq6r?!dQVg9~4-uJ4X1Y8%u?aGGmz3q3ytev_EQ)i%bcRivSPr4s3 zP-IsYrf_?MLph__xS(TMhwnaCOMw1Ca}3Z-CRX9KK$!H%H?GyXYQ!tW#{Jldp1;we z`Y%vNj)I5G2E?#fm`AJ|UcLBi8w09R}viaSN*RwbF4={$CU)jukv7aq}bt{6}C+?rm|TPua`lSTQ0C5b%b zr^tQ6;#^@DJ-~Wr7KT{!y_m;myTlJxZGb#IA19nWA+j3Og{tgQdX~6_LdsM#)~ATA z1AX+}&^3o00m)>C>4%R?mX7y1lF5f0LM2>Tpoed%?56wKq}gUr0QwmB#qqvtZ}2=z zR*GP|?b>3N_V|AKnKEu|`(@_;vc&Djz9F-_nRh zuree7Aw#+6Ad)(P#5ZpPCTXZ&@8v{AP%%qcJ{X8DxBPuVS72qHKFl*9mXv5jST_9M z*P$K_=wL;vF|_(ViG0pGUV@)0LOx&qh4^%rqNg?-Xw|5*7iO)Yg%YKtS3yAm37O5y z&yuPB`@`w!Ec&Cc4#7~LN9`$=yU&^>DrU!U@qp}S%o)a}FCkh#Zl1Dy8^?*Ayo&a6 zW8R-lKCQpkV&d!JNZ2bb&w)?3@&?RO#3yU!Vl7Hj|BV6MMB$5f2_LrL;A4dex{}R3 z3^+xYr~-iC7(oj#t~_GWR-=7{@vMzsUCy4!FniEz=74ldFzHLP&l)Y`L~nwCBtQL1 z_*)C5yF*9t^=X&0aH)5i2G_u_0UpGHM(2TX;OG;&yo1hggi2wsxj4(%^hK10+kW1N zXC1#^OP~jVldtCd(ZQ#;#G1G686k!i7NF<^%nv9sffJ;!Vg?I2GbM^trCzjh-~Zhs zNO7mD{g{o(OO+3e2~fS^Rvxhwp8y+jmK_X&^2k>O8?rXvFV+2)6ulOzkwMTDnxg9- zd&No3_6s=}EnV>xw5WX{)Fri-{nw8Vcki-YF0ERb8X+RPUykrj`BIpTp)vhgY#G~Y zG{@MARl;uZZ_u&~|v60K{tK zS3GS11zx-$&%)>}-O(Ygi`xvy!_60X?D43LWUWFMyWK(?F=SPQ%d-2e@oA(;lF9f{ ztOd`B4T7 zRb12OC@WQ~IKsYf=wvJwL6Pz{Zp8Sg3H}r5mJuXF@fvb}lgc(ML2yoXCGErES*mT_ zMyxjuQ8dTme~Yr($VI zh<<+cf&258FZvi2jY(omv-1!VAh5-Yl*gIXTgV1AtAGK2eOyfwUcRb5nlE9Pr;6mwB>ZQt0} z`EZuLB9mRL3==yFz|`b-F^07)=k=5x%Z0CJ$;Wy|7<&&fRi*}ySo5TNlDZDu47ho4bnJNFy78RvV;v8W}T zU#3+<6_SF~pp>Hp=Nzh5K_SUwotKAOYjpOMFelj(U=aW-x%M>;2rS4f+j2c+?i7Jc zA3(TbeC2HS6YLESKUGGt!7mw&pVKuO7<;UFSFvC8JnQniq8%yF0yy}9Z&uMfVcMFP z$z-h=f$`fWw7h!*W%@!Qtn(X*A}8;k+(TX!SyXM!d?D0*O5L2aXFpdu#$)T(>d7N_ zbo0IxeOdk(EkLP?u?6zWV?xn5X%&~MpUI6_#>J&W!{UwL*A3ED#lz2wpxcrWwY4v1 
zU*Sqydkp*jHiwmOG1Lj43E5=Ij$5ex7D%e=V;x1J5ju@;Wt2=$8UDR1Y?#e^iAqKn z^OJ)Y)R;h}fL$lG@`AM|+(-1!lR=n74>Tt>HWx%%#px8@~9i4!<3l`P+mL zt#CME;tyKC50)Mi5z046{A1qPZqtkXwM-!2FN^K%b7<>5uQ3mF!f)II@~>`9rCT~s z#^QQZxedo#4as4@OKKJ#T*vWZG2|UTlQFmAdu2gdVmWI=Jlhr&$J!0FUoFEYsy2T&~h4lnj~K){GiK@*~~0fGo8gldcYEkCtJ? z$O}lc>4J>ijt=$xeW0=Hj7R4Fd%>og;EbgBLxR<+L0dma$WRDZNku;!J2DE+KSTJ zYEuIxz@g7`GBV*B`o{h7tczF6krNdzEj1&R6HyIKK)O}UU%@>^S^~j zap%4N3v<^0|D^T>0{FyeSYXf*njnDsajg}r_iny5zOq9{s|>{isX1RusSJoAc%p;Tf-{o^U@P%$8l=d-HBHRLbq# z6vnrHP5D^V%}V|VFE`2nW_1S!Mr!@HEtG-V?O~73pF=c|)2g?j5+%RFF@tmdc1Wbl z6O}CPB=Q#or(k+%Oe)e32ArkZuXYCd?efcis_q<~^fSDcnp%k{;JR0{5Zv+AwV)9P@8@Z0p=PzRh8ch4orO+A zviR>0&NFj#d1>0nTwTLJ2CG|QRgV0E)m$CE&5^wmg9j5QX2yqcv1i^!N-0bjLq|5X z40U$lSwbwI1n$LST5Un?f9wcgAq5JdB=b50% zjiLGrqdF1b`8ob2Yc`f0Rtyj-cx^f|A+915^>Ezu5f1ws+Ubuo#ns{5jCt+dXjhMG z+~p~3uVYJIe7|kG;wLZdXRrsDn=tak!(h7@BcRaQz2Ued8GaR0QlI~+UP5BZ>8uN* z;tW}zz%T54_(^7t8`3G;!SVrJ+NUxSxiP4m6mM&SO-Y5_F46vCV|-xcK)4-OjJ-$r zC90`_0w-U`@?I&L2CY;aGEQBCut$I*f{tsg>%pPHcFG z|6GI&c#8kD;MsauR9N?Uwdwj?pJNw^r9CCMiIm!pW3JnP^m41h8c8_S4k<}qLmY}4 zTjC(<^pp%=h-sBA{{DPh_lY3pXi0jcZhVMVm2~>Du>z$-oMwytqlL0jUTEE*%jA{m zpdof>rfld(kyWBEY<)dnZQlcYi0qwm_4}&MueU7jy_7TTH}ZQntn@y|TW->;(3V-a z)h(A_Kqv9Ch(RnJ`R&`%X2C9eq^DY z5LLh=3jB#4o*^tiuyiNBuAgW44}co((X=tnaZ%xfvbI)19;`pnC<%W~0%QMPLzec4 z$CP85`$fO6MXcyv#9^W-%}gI?*$)mSn`jS!~Hp|E~chz?34}7)0(cG z3v=wZ4n#<32yqXvFbdHfFW$UxgNr^mZX-~~oJ=b(58c%=WqTj~^hXD znUJr1ozjni8vi6g(l(v5+&@L2saMAG#oU6D+SRv(5Q18GmPXGDKdWNFkKecoZ`>wp zd%OCumxo}%y&p(PW{v(?PKTrzy-!~|IFEV}TQqntW!+Y~?2;HKP zQ%vKhaV17Ozh6@-GVaNQIpS_|Y%5zuB4CXadO?}U;2agzO9}%bZp;z+9&|f-6AUah z!QtAtm$0m2?ETEsgH#LV5B3jHF_mL97Ep1oOakHP&dR6>0XlySU+rVmV(Oz(9-JO| zhJiI}ZHi*pQWqLkEOFQnCtr%)S+VY9>kS>*SK1->I7KPjf8?mzX<@-LE3=oq?co?^4%ou~$DHe2+45cn#pr8mmTm3&cdgh~B?qROHH0k+7*bTtd!7S@$i~`}sh>cen6^6;{Mf(r!W4eu4<0qd^ zUC?;_<(G9^xzu1zhGyl$s3KvP^q#fNhC`Ttft#bl4~E1;?_=<=DH6_pOJ~KI7kDKh z2^;%GhGxR(=`v|uwjg!-4S}Xt?O!ewu^##gm?8|j`y$m7^0zenW4f-iC9yL#^Iy!W 
zJTIw8z3Ufy5qya)Z@dbq9+|K4-z9%(2?>f`HZIU;pM;ahSJ5`@-?5Si8xrVbJO~23 zLpL2DG#|h$dVLe;w}$@s%ix$S((nKD651CvZ8v7W%Mhs5%k5O+ zI69GSD^Pgtyzg8{$#Iq#`$sib^36|2q~#k3(kg(HF(B~f17{Ve`W3+V@)hph?kDTF z_3koe&+zPobF*-d|gTsX}A)%g3xsxd*hhet*sS{p&nXK+vi50V=QBdIko^*13Q zj#sOQ$2}y%TcX`1fN^<(czJ{|)yId7&z0HWV3781Nyj$lja>LJST z`+IxocIjnaM~u94?uj$fh?N`)In!r=wAf556G!1&^V7-vk@06@abkCbiB*yxREZ~s z9|pvFC=@Ri6z7%ZRNJsdsVS$v1537P{IBz4P-;^YAzAM?#@L{@s0m5L<#%e3dlC3` z#sa}{#}|`sIn?zBM;kl?$;25CyWZR+%voCr?)Ek`AVk;)jLk9LDYTe12pq`q%B=B< zsv#VEY@+{CMwK{weva>aX-H*u>z_DF&(GQRY%Q%GJJl5F9h{~`?4qcmKG6!cG=kYG zJ)guvZ0PK(mLYhtYYw|>@VlobX@AQ8rKNE7C`lsc;0OabtQM2sp`PX^HAifJP^iXj zdoIL}RmwvrwH4!C#tk>ewW04LyLoAKGZXDbqCbdq3hB6vMq*Bex4J3q@X?~T*WMYh z#IVl_ZmZVrOMWFJBw)2O5(j6IrY;gbJ!`fHO`8yMoon#l*FcJusm(F#E_b{cVzESu z9N92(Q@kVM6H_UZ!Z|H}s1LHtSuTi{SycPyX6nJJlguiOvBjv;M}Qi!geL>sGjC1U zAVnLRWx+wJdD1wxe`vHNBx-F!iTpL!Lw_Cxi4?#O7Q1cIU^Y)(!;w+%^Rm3&Y!p2i zReEfuy`pf)2F`}sid!+g=`ZIAr_*x~=fh&8$|;6#`dqRfEyyZ1#9Y+~a(!O5w`VU) z4hTvuuDrgPBuH(LUg^Bto{pq8UWz7rwsX_8dIrRNPjx0m%= znQ!E7>)t(tVu&oL8VuCRbH+{SysqUPT=u@(kAS2dAN63{Pb3~93Mw(bJp~cz2=#jd z+<(Ws7f7|;7?UA-xEX{G*R`pO*jJLYIVC2V)osv-3FTj}zaHRL6DOyy^qcjk`KtSkA-K zex2>{4D;#qT?$U&sy=UZhF&f73UtznMj;X=Y+F?M=_=qiUzJ5g4#@$l4VOcfh@~&4 zT9)7Drnug+{aO+wg`t2E z`>O&e9#C$mLP-DPRGzn@QB4lLmG8Kex`p_L95r!dWxpB<6-Qa9Nn}lFzVg#HmHx{E zE55nzCuDsmOR8@ZqJ8m|yEKn4c~(_NxAw?sQU4kJf1IdG}T?C+=>{rlhb6 zFVih?8HxV}x-fZ+qNZ(e_H}518sK`}?xqfmWJVVwi>TlXCbW$OA$$c_)$-AMmSsV& z226YmZdTSG(4h6EPm?5a`9WT3hzlf}hLp$eOMBxq%6KE0?8Cj2e$?mM(kTv-B{Dw3l2wTqMas`GO zlU3QY=_0(5LlwGiW*Gjz148&0XNlF zp@YE|DKWm$q|{^ACfWw{p1&Atlp#wOuYobqMp=b;I;mYgKFflhVRoNuNXG~5x{-&| zPhVEAnKDf?B+(ii9V<|~H(Jo|W{o6UH5e~UR=%SWW(AF(1*#HHU*gR$S$t^fbB*&H z1g*Dk_0+gAwT+pKU={3nXSI9?b@H zDGghjGC8iZXB%>`|B4}-cPUrMMU!cCEGOL&F#WZ4wUf^aTkg3Ydc}11E-mCwJH@v5 z@(&=i7|W}Vf982xyY-j1X-`}Dh}-kxWX@mj+YV0p!w%Piy?lMi>?`a(;GI$)sr`ki+Z{R6|A@E7}7L6>-k?7 zPo3SK$T7rfQ`1?p@$bsCO|dn@XC{=aK05{?gVxLi&n_a$#Apht7v7T1)1E%naahnC 
zgm~$e(CF{~b@kJX{~PGhh^*f;Ok$Y7{sUd!SFWj{3Cumy#c1--$p7#JpN?+0>db77 z;;X5OT&MYcm5KYzPbc^rsX?^6*YL)eHw!UNG357tW)4<(dPYq>(nHF zfWc_pbflZ%BTnM!4T}MSK{Iv0GwEgSl=H=InU3L)q?GbG92=`{`+Krr!Oc>VjHv}8x~I(v`ezM8J1*6F1#jU`zLr04}npQ z@U1}g3laA?Cb?Fo z&TfxG&Rp<05H0;=VA`07dgiSqs_s8GgH>+x>KQ+q3|jB(VFXre``N$hT`- zX<@s+%D~mgBPpmuOrFE`V(V<)ZB*+-3}S>oOc=1Eiz?Pew{^u(z6hO_M4k)x^d^T> zT})u=YBognM-H?JR2c-L3jbOGHO?LLxc>~S(&~HW&`whY9_IkNsFss9qLgckQZ*qi zh@&?Jf7U?H#R1F0`OA`$fuVVIpU+85^O2G8L5<)ebZ%0E!=7YB1PNFYVc&nC+i*48 zl~R*M40Mxlo~C_f{kA3vu@BJ2X*gVi_uohv#{uWetku7M#~@xZm?EJpDn{<=zwG?L zKbnzW;Xz)|J=~DGwtLxg&~K|Z=sVRQSg6&Kno@$-F#z@&2v0iRn)%tTpY)W17;@YI zT%+Ao?-yG`w}d`fiBy{6!MbnR`2K#Yp_xv8rm8amrl}7y)lin$xrkd^q|H;!`5kRH zKGA*(jLZhcyy1!jD~bgJ`Wi;fZTOeXiKCoqr*Z$h*tliOj0w zvqFDdEet-cig+b}y1!Cu-?1$nSqFaG0@cN5TbmprOX_eC8=qE8yT|Tvanv3~hlpoK zCD&~=Ha=dC`;rnDIGsi#->usjVBD)bu*V=;_GOTAm?B-&>i87vpLK^>CzBvRQG47B ztd5>U=pT2dE>9hx>fd9XlEuWWk+C9C%GiAY&j^Zv2j=?{mQFXngQwF(2A`w?7X>NI z_-KuCs_77vm96fD+ORAIjJ(=x%vbe?-d$SV_E(HWKq=U?3hI_Wc z(W~}%zma%reNwY`yCxby*_MW@BlhJ&hh1ZAE!rb8(0@W9tA82>ce&*f5aFDw4W-a9 z?`#i`W}%-%&mPB?k=!giWP|iI7yXdbe3p)`=2`7UMW}k? z+yb9QCN=uV{IRFMx8-Gjt+HAn(u5wqK4F`AXN;&MRf)YMDFI%XREg$%gEJkd|7nab z{~B$_qgpwId%C+m<;n3msYn0! 
zrHXgXf5}WmpuHHEbyfz<1s_!rk@BGWRS>{FUS!7)776` z!FPm4x37uxOCNz~sdKX&9=?~C18?kREfnrN1%qMzate>`>x^WkqRFe`{=DSX$qV?ou*gj|WNS%^3Eb=3~xcq=Zw95}XXmD})h z&8I5kH%PK-44L}_a(6lR#YF!4CHW09kjpRdjpJ-e#num<^R9Kn|Is>MGU6Qa|I@Gk z==hkUNsA_cWX%CZcX#$Cuin`kg2A!Bw_vHwZ}?qe-J`JPP#wIKmiopV|NHjyDX9KO z0e3}hPW(>^hhCZgt5)N6%W5J((9u50n%!`3XrK2tsKRr{uwh|sllT8rT7)b2pN~p@ z6Ji*KEq#^K-0PVz_!k`8F4?=(^hziU7ryc^CQB%Xl<5B$0gw9leq$UFwU;9{=mNEJ11GGoO;jX*yl@h8KE#~dbPc1B3wMhsqz7Zu z;BcrOHu={B&kc5^+(2+uvfU$9-WTKg2)w>>D|~$+M*It{i@{R!rCA;5IdQg|MchwL z35Tn4@8iyx`m2@XOandh%%7N*_;|FA`UM@V2MIrQVVYOJ)_+gT+w>=OqH6Gm(ZFo@ zgu74PQQ=2+EgPI0E?V@p=v^Z`9s8X?jBz$bpvcx8qVGuuBkRXTdgvL#svlwpZwE8WzWS> z!C4e9Ocv$V@u+uSwSkaFSdNqy$Hac}`bp8807MT=)Z-o-BV3hl_FiA$?kn7l9``1- zHM1E|+mCb~7P0Ny+LenaK30U*8<(M$2-=nV^1;QPAytw5^>12OvWDgk_y!D z*14HWO>3I_PaM{R_ds>vD|GH`-Q6Dbz4ZdMfvnip>O9qM>IS<<1$2lk@yfLZv;JCX zw=5MeyA;IE|ISdaIjD-66&!2V^dcjiZTw2+J&(IGJd^8uZp&CXNQ+eKUHZ^ZCa4Kv zv)3Z*QZDR_I9&JRg2QC=pm4T?DIi8SvnTT1=!re|fXpm+?P#JiSKyo{H7Sj>a93HZ zcbxUW5~fyOOiTN@REPHSA$W5g@ZkAHdkizc+t1C+<{Hgn;Z~?^t>WRYt(Ke2eHI>e zZ&ISd*N5I^WK`Dr@sXjg3^ycLT@SWIuQZ5p;tmhCO#%uRR>gB*Q9`bT-Y`K90aYI{ zp`-fC>)d&A>|i3@rv2A({GTsPR@Of!OZgmGOG_;hIem&Fal9^WRp<6=@HA|q;HvvP z0Wg*=c{fp0Y$Fzg|t1xONV4RQtBBf6R2)O=z-zDVcn$fI_Pqi|ZNe|k$;^F%& z5{ylzH1*Hk;n#nskw*#hY_9q`BnUsH*{SU(#6aJ@;K}qENpo3bm(Qpav$JI^oJ7&9 z`td`qiU_FiYRR9N$5pyJ5sRzN&>0Ee;1oi_ec>_$eQ!XkV!u%n;g0d}z{4#~d~vNN z+j~M9sDyv z&$hD{A=9yBR{2gDsIBNo9g>o;n~h8ImpzoAO7d3jWq%!1l$hfte_8xjxjhDy#&0i= z)6zuBR(N~(@QU_4cC@Oi-9d-cYrn>P_jC@KkfH$XtA6o;!FuIP3{Xq#JIYaCN`tYx z0TH*}p4htp_Ihhi?9IVN6$wiWcs#wQ-=xW=>Gol18OOVah2h*ZrvUAo6e+n zxh?z|Y$}b74>%{w<=Y+(R^75@cm_^MDyis7yuc7PqE<9aFFKFjx!te?>4wGv)=@2e)RnrLUivumt?o0 zMN3cV|G9E}Fkoph>x2{^o$R_t(Cv>a*3CZQeX%X<9sCTIJG%T9;s~w#P(z<~O!QAm zr=0GE+ZSAVLvxh1Nbu*4#(Sp3_!OGt(7rjwtt4yuEEhC?VfO=xB7|W@U zI0SqK`~7o(hC&3yoxs-tRbswM#Pjv8V?)=fxE|aLtHb1=^x9t+u#$#y{$SD2oUrLq zW#!T|0OSqpmwb-l=IU5cLD-xZE+2GLZ}>Y##XmyASk!(%s*IK?ZaP3?g~=c}XK3O_byorO^6-haJkz`5}gbf@&Qsod3F 
zS+$2vd+WUnTAz^$gCZ#58}$thw_s3h^dq}Uu{YQG$OYa`JPX&CnFP_t3s(uLjy z8R%KZv;i+R$d9ZnOu0wr1V?w&v5_!4sIa=$2saVG^}cbf`Fw#ZD-(T`ffhUieHMH2 zt{|W=^)Ro=FRZ`?!iH0Q2u-Bu^QVzYYYXb22s>Myp4jRCp2d%xh=z>>RV)o>ez+iw4i0F75U6=QtsBR7Di%MVj@2+Gu(qf1XE3;PLCLWSi1)twj zBK zJ_ZM+S{iQCaYI?U`JSnPHj7AG?Fm3JCV{uWuoVuJM1?OQrh-!TyEjL#t7MxSs)nBo@#1nSBR`yRt*Xr`gS zRYMG4GY(Lk(kdJ;=b)v)!yo(74>wXmRTGKx1lPvMZrVu{-j!}`y^~_j;-`|TalU;4 zzQ33Jzy&YtgNz$7ZwYzrg1CMbMp1ZVh0g6p>!)@k?S-ol{8;p>)AdgLmBz*=r=)s5a98)MGAd%{HC@Y%&noM z$_s5xeWcnjEzQS5qKT^c>M~`KO2vVyY$uarR=WAs@)3Pp$P?egv2eSmOA(?&U2w@F zU59TKk&P_w3b?lmt6?+YFYL>8AaYxi_CU4_Lg8ZbPlcg|i`hVw4v!q9^HUY4p#&Ih zO$GbN(EaR~E*E;YCMzDPnFxV~pxyI1=w4GW4e8BRA0wgmD@sH4>uo%SHrKC<{aG{= zP9%c@W$)KCWPza@H!Vaw>Jr)+yBo)Emc>(!0It1S5_&6!4uREv%=e={ejV4udHT&1 zF5%I5F@_vFrUMI;M-wbhpWrZR!eYu{=0f#>LU&AfU8f{6q_sk2t-YQ-Dik=$eVf1^ zUlN#^tW~3fw{VD1+V4qkx;WoRJd(NpAgp;JMJg#tnSBLtPWEaOW{*WPNKzwU`ZC-Q znUI@z62&?k>i+Uy0jMUt2+Mh`h94D+Goz7n2f-#*+3co`HIS_pB51VKr>ohPx=vEH zuMP>UNmFe)G`i19Va1zM%_}=Gv?b3)l5l{v@&sa&tpM`d|H6i(E3?CPmr&)zr|#BM$2%^!D`oi%N&^ipiL685bQW>4gz%9dGO<_Nu z-%u-NI6D}w>68s> zo!Bbc{5;S?-qRB#g2#3Kc(Baz7~Hc9ox|mAXE*K#d^RyULdHg=Ay~N0CTcxQTEF*T zZkXd~I-J8>sUHk!jM`Ogi=Hk8NYrmyXs7UKc(9ar23ldmr*2JMe8# zxQhaqvmQVCxY6AerUC#2!-}-`{BH-L%>BLZK2-$V(FN>XIsJ;5_dgz79xtwIVMcQ9 zwz}Qu_Vky4!g^%iwKYbdK+*(e259L~XRIFyB1(BuqMYk#Sy`{Ial|RtJM3PKZDlpq zhgpnPz4F$Sj;wP!zO86-E{}{BLYxj}mZkN4{FeP3f$!0IfKp{%X83N-?ROu6)al(5 ztY_iP-=YSkO1W<*(7)0?A@90bx-@0(udKExoYxg=cB(+is#-7alZ=AD&Lwzt2a~-U ze26ihL3V{6L5ZxO#A3(ZJift=UuyOLHloqoNhYi9p_-U~Z)kdDv@;FIm2S8%p@~kKA^>B7h1%}j29h+#QmbHjN&gbA74e$c?sqj=^Qa+^wrsHrZVIeWAO_7h1 z5wf9#EsMm+b_X&sq0gSq#tZWXBml@O273&}29V5N0*5lvat7x;vK@x`X*g}KwNlyR z(mY8R(yX>lD~e_t&zn%bE(J+uY{?saNF^&RgLp}CTJrRC_6dEBtS%C+-p5X>9*Y_X zp~RDQi6g@#7I-|=m4w#!IV2n&(a$lebSdq#C@ZiC-Z5GFs3W_Da8{~Le<*HNi8gbq ze?nNjR{3oLgsM@rODr1BW+wq1D?eo=GHgqE_FPz;Ln5(W8KP;O9UXHJ$&%vdi;RUw ztf{Dn`jgn1$Nz%g`mBcEe|zNgdmc`A#6SmGDlbpJFl z!QqGLnq(lXgpBy!w^H{*PX)B)*7rCRvSX&Zy^E`bxoL1Xvtjz2Dz=7#>1&j2+iOBo zChl7Wdjr}@9 
zvhrR|L<0!r{8v_7Ec9AZNMTd8@{~9B-O~<f3hB-8{Q%?V2XT<`f!co*V9{?bIi(-WXxVzPhfPAHITIBQN) z@e%LJj)WdFE5{w5pK#4x9%k+LKhaAJ#J+C}Dt`^vK9f;PmKC!FBnWZE5UUvV=|Ejg z@aL8DMy@IvIhc~QLB4;r9oKKTW&j!Xn++c^{4Qyj^_L4NKCKN^=PC4!L+Gqi_k5JM zY+p`VVCL-qO*v;T@4rcXN80m-DkQcksA?6PCcSJpiOD5AKDvUliqIxT7^y-^@igWK zL~chMmRC^TQyb@`LT#hS`2Ew(C)uYoz)wqxnX>%E{XXiYgPifYN4%%~rD7qhCAzt> zLF3_1*zGlT#eV3VjNd!EsoN^(rc1=VztGY0=jF;X@7!|5+tsm?>(iuma9`h~5|*|s z+%oge_uhOU>HH-RnR4c|6u{~0zC~qtx%|O>7ipq8N`3rCoUh3+0AzMNJ{Dk#n+jMC zw*zS?DUY>Gqx{*=0BG3e(HwW?+=ibSN;_=6@&h=##DRrSvRu#uF{%lwt@tqtMvY=7 z7$Gtb8ah+BH8i&A80fo{!oS179+r*$G9apr>Q%GGl#&mI4$FERKb5NM}YX2CvSbDQTJP3W+t$gk8=F-=| zNi$-)#S$@wll{x=?blS5>06GR_Aboh+!$OP4rmR9S)%$S%c%`G%tR6Ju4SE@vH;x~ z=ZOj8_JH&7T@C-CD))^eW?yz4n$#%-yU^AbTA-S3pG-D0(8UM|theoK$3lFF?)#DKspCDEFtr?;vhWatXj)6diuidY882#&bl1C%zQ;vkU}Qh+Kg+EZWyc= z*8a>vd<~0_DQq_z!uq}$isugYsaEtg<-nxY24EK$Ay9Go10!_?!tRO!UJ5~iwzNnn9^8t%ySuwP6nEDacP(Drf){spibHX00_=SM z-uvdM{^2tdc6hE2Gtt= z?LW_eCXK}cj8p7T96R^=gn`vW$3eKI!>ivb-s24r_$)LsUG4B`!eQ!Gr3oxwbEZX@ zZw~`+b_?7=HSNs+!x9xj@9wK?q8`72uruX=b?^OSU+lxtX<-@)Kj~p-pWO%$0m#aT zUtl-e4{6H`I(%1`1d#%HjYY=!UXyUe&snYCB*F^~n!DGss;9?qWnouC3>2_`^thmV zpUZwB@$JnAVv>5y=KvJO0OQBL!{iUtSp2UTaYSRxmba?!$LKgXCKHZlHDoO zssAvR^PLT?hG2}YTsV)bj@Yn4&F@PD{h%PBeTf9> zseS$O{=u$LWt$lXFi9^i9jfv)T7pW^&5S7Y&lcHZCbMbA<5)@zL)|?`k~f$}g3$9> zKyC3(D0aYC4XAHiZ@_nN`#2%gb;L9JwV8Xw1&8IM*HO^hYWMHNbJkhyG(i~b4Wf5n zI9(PRlyY96nn|tC#zf^0QSa3P3fbe?bgUkU|B8MZXn^5F*Li6nH14{u4_?1Mrc-nl zFY=@U09T@l;V1t%Mo*@6`!mLQMYKu4@zhM0rqW3vul}p#YO8O6DP?(>B|YbpQZNU1 zsYMSoZOk9UC>u{Lb=3jyq5wNk;(!?*xleywTV;eTx}#3f=pM1BpxD?BH&6HY`vtZ; z66`+!jMrh%jV*J-|13+4yByCA?yu(01E)-6RnyL|q)hY89Z4&>o?O;iEbzmq25zeleRxwQf0Rdc} zTida3x->WIhyh-5ry{TW_YEwSpWHT&f@tn*JPN)H4VBD^49lRB9IaHE{!#)D?7J&{ zx&Bke5=Y^7bQ$6GJ!y6yGj`kCRMu`_7UAbsP0^GUrNk{x$y7XU-a7gfy+>U_AwSN) z>n%BF^h*4J&&BIxi=Q`y#lFlDEJvD(U9P%t9q7FS1&Y;&xux$Hebmr??G`6wA~VhP zL0zk`6#Htvo-i!_&x-S;ixI?B$o?)Fa2iOcJ?Q)G7vHiZAJ|J94Ue(AMY+E^>dh_g 
z7;iz$n8>cnPCLf-qDRBY2|ZVcGRbR#!MN)T7E+SWBU*e5lAS2}8vS)-*$%MZzlhM#^TD2AwJRlxjTwI;*bSN|nv*JZXFtK8~8kzBv*j z{k}8Cxj+k^$3W_H#b~sO4VFCq)m+XkGS%8>hhj~lc3@Yvv*(>wkaZLV#`?XhLuqYB z3i|Qw%e^$?>H6m1A)5N<8G9av@>&h&!td%v0ovg5lC#5Y=5r|64S9G>L|IOyGOjps zdcTf(cx6d0InTO(Fz^((vZc0ZzH`KsY(HUu8D8wD@(z{(5}59YzUjsd zy=YnwruKPRHQ6HH9Cm7+-BOR8EhiM5>|$l76mo9W(RQ)>xKCfNpUVWHF8ZHK&(ap@1HD&+7jFc$~fCAo- zF_#1pqJqf1cG0=+!QqPVS)61h_y#Fr+z0Ko&mvh0J@j4hWil6>CVhzvzaTtZH00Q6 zNgyu#M>YTVl#bUK3Lv?l66XIvo!eWXIzwPzWVm`vni*W_LOzj1c&JcT;Z@eU9MOhz&nndRC*r?M!Q0C7YzQi<$_zHCt%(<+Q4<}vM#aaE!$Zqr9FLy* z3@mOU2>r46M=7(fQa7DTQ71IOZRz z)^dcu?oX3<(Cbmi838cInjMKa1Hd+9UtIsPwOv*FypAwZ?4EhloklWS6?@M0-`8d!ht`%KbgAArna@O3G z$chG0yfvrF8hw2`Fn6qJSKh|>E3%-^W`QZ_$MHdoq*}sFXX&@#6!53C^W2vx6S3PI zY0iMx;xgNdtm1nPV+t2z$*Q>QZ73lQp#s;{A+6rBW{J_90zgtL$V+ti9!T4IuR1Tt zb+1Z~wv|Y$UY$^`IjD^l*?SrBPt?!mhnV!&I|}mpt}m^tY}8B=&P6F4{4FwrBLyBu zaqhci&P+lOsVHS72bosY_I&Bit(iqled5H}Zq#mG_^#M;t3wi{VF&n|9`K4=!k(Js zoUv;lAqy`0g-EE0g~^78kIIeI)mbUE!n{bi;Gr^E7%@aj0`ALCqPSmhl9S4)^It}j zMlgV3SJt?TVR;j;?_#lBd@?h6S3*y}#cvFI<^DBNsgHA)RJRkTHqx28Y*YK`FC!OA zN=Ha(=i4?e{2s8hEKD9)4i7YJ{0}Z@t_q+n>4_ zi#%&*#sii*uMv&99!!z&O(@smp6)YP+!$;UIgE+4q!I{FUQ$M})XbkzYcA3bbar5t zuop%IzrDkm?JS!sEnQ==vfA3J5M~A|Fw*BGS{Sb@-5VE#WGsgF|59*H{66I>{(9H) zZus`&DhYLWgb+1Ut98~xj zN)szvCr!H5@Fj(vP?gg}A;P9FB&4TvyYD?Pf4-Rr2G8e1-g7Lq*3tDqMC)1(a-A3dqSTxt~s8FH7@ z$%KZ6V-q>*R@cMUlw8Fv*(vWI>i*t+!*GR3nf8%b_zXACw^P5VsYT5gnW&paN@f{% zVVLBD7-Cv_Sb%iF-ozc~o=mO2LI zSu@&K%;cPHri=5HS`hB&kA`s&&o(aaFtp!}#nC(@^h($wX5uLx+`Rk(?lwyj9flp}NV2{jrkDCH`k0;EX%PyR4aA8TB_-Da_ z?{M9lUIxe0qcu^8sN+!?fiEb{P~q#~(B+(l_%n&V+Uc^?zU4g&Yp6q)hLy>htV3s@ zmbi6TxEpBZjFir#W}?y&bvwqZIZ*6$X>2$H|KjI&@AZpcx9$oALcN=O4j@ z?XZ5AZ=N1?iiKas$&QTH-T=B zZY&DBkHbirY3wA8N1LNeQN{r>7js0i8TH*SGu^?xo-tbKS-Z0seSuWQ@>uS zkoyzR6SZJ~v_os)o*-JAd-yM?+zPG97(%rF!%Kl6&CRp}rK&Jfqnd>ba^Irg?5gz; zZXp3GsI}~CA7Yo!lHnwdYl<9pUr{XFT3!8^Zb?*y9rJlxyd@Hf#rmb+sK?u@M#`~X z0DGrB^1AE1u-oE3ZLmAqz6DJN6WX`+T^*Bparu1~b5v)Rl@(*a16wryovq?0=tBR$ 
z$o)0(6mg*_pW)_TG1x-CuS?#*)lSMmOAGpxVgk%1MwV%3@wW<95Xa_y3aoW)lCekK3fr1E1N3f5~ga z#y%E3@CkQ#>_zdY)w8!Oc3F~!2O?n?Vb1`-mXiLny!~jkmZVa|KZl>Q-Titf!#}JC zwa?@G7$Cd-sQC8ca4WoA^W*8~(FEHT%L&WxMtLqZ;$cu#7YF1BU7MgVR%;wocyHko zs~4=_>6-zV>s#mY^irrGD&fP0+giyKigGs9v#)mRDeqn%hfZ@Ga=ic0XtV<%(_$%!%2fNasOy;G7 z)+q6mI<^nsPH8vSF7o3zbQPdz;_@`YQr{yP=23uz&hgzu)Fy)%%CgEheW9PC@`bxk zPZ={anLS&$DT528$GPzK)DuNTlV{WFxJS@#e~?S!^_kGXaKDd_)Qt?o(Mh9iA#Bi7BENy>_Ia=OmcI3LN%?VOlzohnYkJ^^yiXmh z>(@?59s)*DZ-F>xQ=^zPB@v@K$N?rzV6bvSPjJs8LMkZSwpaQa)#ss!g=I>_AvexG zqJle%&#f3UlYe&L?*x;YQ{vDxA4R3D*I`}o8`bAl|M;M&{x9lFhzsfPj({Id?ulMvGr$z)sY zi+1Nl)F)xfO_`e-2sHdNj3tq%TpBJgypnOd=};QcW>g&{tc}B|3L&#sV=?e zBqxMvlM~OyZG7 zFFL@)r$oFTQaL&IKuXbjS40*|LfZRY0#mZ@*cZpNV|hz8Q*c(zdEV{PFyo0ebJOq5 zFsxjdf70XJ9QS1#M9K8Sm};fW)V2+A@RW*c1wu9YcL1Hyi(cB!olM_uTT&J=H7#r* zJCM=j)RNclnHGBWQybO78H^XCwewIN4f?4IBnS-JO2LSx`Vcg+A z`FX6RI_lvf_@|WnXf30mYGJz(@TddO(jSjY1(R~=9rVML31@Y99C?PR*+|q4-}l}s zqoUH-XwjPGSZ+tCT798GtBbsjs&R$r?hb2~k&KMhvE;XeMP)Plaw#!~-3TIOgwVUf zT7wRmiT@Zq8(?pK!S8vLL7?UK1acDvICbIv1(NR({Y#qa?)H#DF!cp3*JbVT{_az( zxi)2;3pqTivdGCibk86j+2LE(0~?8NWy-tdWjiINd_SeXq}q zh>EpXB)DM|3)n;?kB9zvUe;aln1pCbqK+k%_@C+S(GB@mA--Aaw@3Dc2_<_A`rEzi zPFr1gULLHWD9Ugu$-46wM0nWf&YD4Yb--)i&EAf=(R@D%HepIStk&CVzpwvg{g z+)s|)X)(Z<)mPNm>!M|s9WKR=Br)@>0aw@M!g zmE1r$B+6oJfgn#Qw(VKlR#%U?_u(C?*$I4qiGO#nWB8kGh0)e>!7dQgV@Vm+t4i6* zVp4;P=$Y*2iy!7--47$W;WznDJoJCA}@Jtr7b9TLZ{5(gg#;BKBYqEik#szO^RlBmLQ$ zXQzcG_*E5Wyc|?-KyojOay++cZq?i)yffbbmMcfUyMYnz_TDVU-0Ud;>zQZrE<@ZWLHQJp= z_;}xbr5k@2x)WfgK5GPL#!UJUnN$QS6_|2iledFoTI5YZAa|;EC;J#S+G?OIRc^8M&^Yj|C;;kkb8trgHb;a~83wOIby|exA zj-DwxwfFKVsb_?5?|X*Y?@J0nKliL@)%m-mA?__ryH%;~#*YEZIKZbSRzcWq1A+w6 zie=aF1>a=UL?>eEPYGu2*tCy6p)tf^{yFB>YT=B=ZIlAldq#|aDR`aBD^uAzG-Mfm zX1?jHDtGcws&F^g-+z=yuQvh26=IgPri9Ht0n;6W~4^XibJDd`IuG_@;zMEr1?-eKQ28adGzV zz2ldnS7eEes-yC^3QiPnwPT<(M3cC`S52j>!@M-N7Mp zAn?c0=ko*PzXQ}E3HZ@nx#K|}js~svwak?iybj%QyI)C}7n_zJ-wSv}jCSNw%;Gb9 
zU-rbp_du1oa6;4iBJ#gdmJ;zK892NV$T>IhP^FSjA~Y#CLbk?Qz|vIQ#~rH_&O-fW8p4O^+F^D=NSM`n@_onu}O*9E9vP7}clh zH@(^=tV6LI8#Ph?>%Pw5h*cKD7a`ou?PC14&eu{jq0X$73J4^lEVL88>&F30PwLZW zK5BJx*cMgv%fZH5QCDSRcEb7S z=ESxxQ|z4GzK~0del+n3Go_mkq_8_B7Hp32rgOU#jFtg5wVU$3yvtom_&IM>7xz2_ z#e=+cW8m|&c*S{gI(smN$|{ETA`tC4LId<U|Eng;k=mFTMZZ@w zQNR5Q%KYs~h{BSLH<4=4i{ZoIL1YTam*j{^8pXjdj<|HQUa3+iZE2`>?w8j>BMJCx zo(ljTOgX~EbC+OXx+Ks%COT$f~#kLvG?(`2CU@RdR zFFfwyp`1AF!bqU2M{pea*(a9B;9YO|CZU_Yuvud~Zg~)sWie-hfr$srn8}*W{fSiR z!>js%HeEbmRjM#)b>{b-$4r^EI1OPVHqsnG;uFT^YH>j@{+)`CkEt&&_k1s1dFs$p zT{vX!IkP`;)7(1Jcq60UFh=`=#wT5J%{yfu>(p`r%ZL|heQksp7mIaLe%mdstyl&_ zD}95@yre9xt(b$~UJG`}51T!ws5Up7{$6g%qcL;{^7{Tv3UD;lYYtv&siL!e(>+-K zv2cri5NGkc05P)6*wS}%vjDY-D=1;F@Vmj?67g{yg_((4W%R>?yL}|-55Tx3Wh?{w z29kD{a)<$0Zw4HAur)Q5r3N3*X^kjo2q01fvlQn=eNYCz-2Rx1Ox|M~8VH=i{_P`! zi!!#h(P*RS6Cc)%M}~Q}RV;`3Oudk=7mJ0|A_wpvd~3r2NW4t92ihI@a}?Dne@=*ZThz=cc-1VPZ&tfUG<8ez;*|E47P|4x+7BRw}ysjr1Z9d z*>T6(y$$yB0z0?DexO}nomQ{vAao@1vmzL8N38lpdCeiQ5Mcs1E?oWvi{_m$qX0WNQM&A)I0a|8(M;^pLm1+mbot4!75$xx=#sPA76Px9wV9l zVc(TUNL#GL<8_-I{_s|65NABa!cIc{We)tB(HFpE+~i8>Ze;IS)Hy{gX(^y zh`co$s@Q*V;aSu@(woCB_c$Od3&DgJ;XB|(cp}tbOsF=hlV1j!XV1^=QU*k?BXq^f ziUivlEAewQJTK$Ck+c3t$&bv2yOJtDNdvvq0gA7}b7Wt(>y4vuD#!F5=c;%Qm@YqD zg^~vPN{U~32{BEoI4_OyIvDJV(xkwCcd2s{LF8RZk2;HoG5XaX8ND@NI1EuaYEXnKw5w4VY&jMA28nb>zA5p#)Qa5th62qa>WN?DGJ2Rsx{{D+)Cx@-(J$=d4rNtYA#Sm+wBsI z(#1y4Yyaz|Q<>L$xT6Cy@dZX=GShI`Iv}7+ZalJ%wLj=%8vwtrBcqUj5qHTEt`=Ju z7E2}1s06ejU4gM!i+Axgv1rQcIH@VT0ictqDSrm_*7x>a$yivEN{NZ)9|X}O-Bq<` zt`#V4igt^GE4oCF6V9D?&|WVKHI$uSk?_2n1g7?0@s9m1GP_Q_UpoGn-GJxh@K3ziO^dUWnX+ygO5af1vYL8ibHq**8UJDp%Pu^vv5lVeL- z>hH^^;GAu=M~=6lvsX_1VM|S$Z({1zRZ;kIn`=!s0DZeBsk_{=5XM%IJlup(+051_ zRlm)&p!^rf2iL5)UUfLh=c#WgGQBE<(PqAZaygV~T5hDwr0_mlHLhd7Z3to;~bUFWO;Y@N=eQ zS)F0AjE%!>W@CGApC6RN-Pgxd-xT8o{Iu+GpnTFdnNa>s%ka3zVwsp{xfg~`sFP3^ zm=%;=fZ8pz*86Bob8R}HqXv!#d$^LU=>L{-F*78UlpQ3U#yJ>gb2|1arL^85-q#zG zI*Mrdm!xZCykmvbKPP$$)$_Awut?|5oOo-(blL+S<&xBWJpV=G1NffF4(Tly!?wkx 
zqceJXow$~~*}_X*0+uSC+ahntXL1hp{@M%j)U-Yt14nq+YW0m={;)<(r_@k3vtn@> z-`24y8pLRI#TFw59o+aj-k<-Pvm=;e{k}E~@%811{XwiFdxm%wphOLAlMtRtdgB*| z(;tUV#@`{qXKIH{Zd_=S7hh!JoyZiKZ|cf=+7x<-!|YaflaHFEFr+T~tko;1&%Zj< zEehlhfjwS}k!CdhMRyZ)=-~Yp&Eb$5L9G(?Cnmf-hhopTj(r<&=T3Pqa0(f6p@=Tz$}2Hia`y3yAdn-mL&N^&h3k0?cg~)E!t{)L0e+_| z!aeZt+MucD#&*#XQ@p!yYS^h|rq>_rt}&u_C?-K|^y0l5paxNRqzWlL&-gj?ZHBVe+Fx}ij&Kd z98{DmkK7t3v-3wj}j zKW7S2NP3OzC{Al|#^6AkqMe2UBU-}?61-YnR`^3%RSwMJQyc`_MR3q2_-YawR!?GC zADna9hhHEYVV@@y_|8VoG!3Y?+tr{fjGWe@6(h?ZB~UjoP?t-n(LFU1E`ImBzP{p1 zlS;W3jp!F0k~}k%rL!M!g%&+f=+~4;W3>J*z_7G&Etct3Ku%IbOg>^o)7nJa?LKX) zY-gMGWxaSe@I|CQ)t+jutWbL-qfCwUpfsaA4ON`OMuQZwYi}`sC?i)F9GcPnXDCSq`_*qE z4!H9(H>|6}?eRS})^DV1A8jw_mDIXC?&Ir>?%E+9=6skUjA*5KCJfgOe=FP3y{$r( zH|=0COOTiNhDT&ysKS!Y<{jpfxKB{F9wz-SCjQ30FeLpirY|4L!IE{9xGLW< zkY@18L~spOWe7*Jqq(>iASW9ITX0B5HjRXp=#iOM=E}U@XlpELgz`dd?$x&9Yh>@n z7?{|S^Nv6|iW8NNm%{wnGZ)^aFGXZNF)s`}I3AGX(KtAWz=$(S-O&J$nZF1O8W8ZO z=QO^TkW=d1blbns6vMAPo^3f4Lt7noSZvtBtflBA!XZ;?_YFSl|rI2~|*F0SBx z9)kSGL<0_RUagb0gd-HixPUVDc0OOGG%{k1A|sxand^!?2n_hI?}(0Xf6h8RL`wvk z7Zopy0sBAI6KdkcZMQ;DX|MoKtK}9s8V(Ef!RL<9$Y&}QlJf~tOC!1>Y-jZSzlZwV z(nt-pgAgu4R2F06IU>Zd6-#q8j!!JW4wL90R)4ay&60v91xVzT2{SgnLJDe$`XK23 zZd;I3W3HU7SL{`U^wh)Vh;P1k{6)iQg5JId4cE>Hyl29PK~}vs?||;!6WlM4<6!(` za*;u8+vO!$eL+AI>`$}U;$s^Zj&*-yBT@J9)%=SiL!1E#`zRe}om*3sboFa?0cSJ$ zv{G-qhgD1bI0Yf6<6KgPr z-M)pm8L=F!*#)z;hE5Ra24%4nmtMET>B53sWog?AOf)`iYWa^(AE~y)Ua^$fK7Jcx zck_FAV(P0;z;k}BbdMvNvgfnMnv8HpBClR9q;mWgM0(BPm*CD+n535|XAUck{)0tY zferHTwujw?BXmFWIXb;*f69O+w*HQRdW5sX-R;-<|8OQI4Pj#zBuB0LH$OZ(Az9xk zX`fL^GQ}@Cx=z3(68z9gw1^I$p$Ao}va+apXCTGev_MIKVC;9YuSPvFGbRtCF8e=P z?F-Q<;|4@)JCunG3jNo?LK2qTEI^Xa-6s-AqFuYnZ^juh2D>=+c`0;i(6%`K`{OGk z|NDluv76FE_`h#mpZ~vikN#hu;kEruRI~G)pOAyj*>G`gUS61gnIdQ6^1@> z+I6}R^E2BrmNjJUHsJlh=T$9UotodT-n3RVQoPjk4BAi z&lAH__j}3?%JS)p0OmUdnTC{}X{E+1P>&>U;0S)3#NL4Nhz3ab~2=f=6L=ln7Y z`&1u5U@KEo@?siZsd$`c!rv}M`UO3MI11>_UkuI0rIWJX@p1*>*Hp1q*DFZxbPjAq 
zeW87OBNdHXqG(29f7~@LOD1bBAc&FC@)M@Cv8TnQoH61=vFljirRu%DchIx*FZy6$ zfIISCPt>*eKc#f1h=i!ACs2~r!S|8o=KZXJ3lMue;hoVFJ)}Y8{QX^=WmoXwuRHW8 zqXOqUU^6)M56wF^y0wG8l&q*!>CDSwEojXw*|Xsti)sgCI_23J zo(}2moZE%ebq+LlzxM|U!aihvO7{nRFq?2y6ffWLTN`!kpZJG1g*hgcO*=FQffwBC z_T1kmmaOc)?296j>F5 zFJY<<)=wppnR4NEuABUNn?a0Z;>VIhm`(jf^C;v} zE2Z@r8>_!TC^o(9uS} zgnv=6=mG|8C_)@x_>JN67DQ-shSj?vN?#WY)+yM+IG!j5!@b~2qY*4hLW}TwP9n2A z4(GMEC{?;&#u*Eo3skh&mIkgwd3~@r>pp2hqWdv+*}lLYRq#or)Vk)25gO(PR(d$x(gL1Pa1`cTI`%*ee74gUJP&eH( zEn@sd$bR>s)YSfqxgq6p6}7JtLPi|B za;7~s!<H+IW=QwRf`y6t!`+dbU&_zKUj#N7+=CTVuSqq2gt) z_ea}dF*Qs(sbB-+{_TMZeX?4egHf{Zh6ca%Jhg^5{F{XOd@O1==7Fsl8BR6S{TTg4 zw6>DrpHstNwNHG>i(veKaUoN4&Zc4CtuWE%+FA^@?kd+s#37t$s`HLOrGy@e z*1g7jzsSTxWdbQ(9sE`DpVeMkqgk_Z=jPlvQ;m;OFy#l$)fyi5n~my}BzIgRV)8B7 zT)I^2|9;W@Xs^eSa1-3%5LocmBd9me+)IB_#@-5hJF%V2$bBadqWo=tt%RY>J zX&Rq&tA)}%Wng^GIHpw#!0 zP#7x*vMv!91`I#n5s~C8M!rFk$BNXff65GglY*p`jmceY8z><@#;q}~0L+50D)%|- zCr@@e6wH?5JMD%>CpE${`&erlO*HGDDeV?`UKL?R#5xIyL~a{jdXdnqgP=VcAb;VaN5(hu8sd0CeIf-Au`+?jA*Pi~6Wq#r$gS z*GUk=@3LSYSZ4TMT<^&Q?+^NU%cT=5tCveLqED{*oM|@YwW`V(pnN7KZSu<5cjAM* zku1*U z^I>)2*b(zL6rZB9d49^18A*bx`SKf@d+ii*i78JZ*DWr)@}y=c0?P_JW`6&qeWGpk zxyAZvndCW#tz?EJRDb6dr*t8X3HBe_pp6eb@?t{b6 z5EAR`hto%+GSt8h1*U>CKV5w< zo4EL;#}!w8m?)Y~rF!SDDErb`oqpC&C+@*AM#m_q8p`R84X*)k>h7Q^ zBWLNSu$%L@ImPm+JO7YYN%rVDe}{{I*48gDp2&5|^FZl3R3Gc|O7ed`5DYUAs&o4h zBqNlAet)MtJfsF(R~E(2@>p?`kk(h1!|N9DBFG2O(Nt{6dR z8kjtd)xfN9?(BSgL|XUthfd5|$JrT%gh{fyAvV^(+K2LMBBuLvhry4&w%-#S%U@3W zZ#L|09uMc3M%5+XlaF?TiR+~au|_K^`9D=n!jONLMYR7>@frQh>7a8WDxDj3F(;~k zYxtfQiC0!t0!T+j+^gTxRG3Mz>;+7nEHAlsaaJLm{wg-D%aC8DJn=@*I2X6EAxK{h zc^`}u{0|?v$f--5d1z!*6PNLThHFYrJB#m2pyv~XE7Z0AHoTWIOr*9-Ueewlj%jS6 z({iiF_0+egicVpUmN5<^xJLc>k?EL zqI{hQvHW?ogn(e#AT1`W_85Tr_n!uXMZmywscS@db1>s!weKrAQM2-no>LO8=`O*U znXY0J53-t$TZ3p|Gqi*Kny$k};kGe*mSeX84Z12>0&>#f_L}sV3wl<2uDy5X_BU;< zor*xJ+IZ~u1zH#!^}qV@d>gp@oIk0o{2`Wp%=ejt$b*CTlBI7$Qc^OuY8cK>p_g#K ziXG57u6{eYFX4`|PwP`2?s_>~tikFk!@nrX5|gpT{zhrh^Cer0YU7%}G7b4|`=O8cEy 
zeL1o#RNHgjhvMolwfn@e11~dnwe@GG3cZ97<;qE5=*#t4;FyH$xPVSs_4z08yYoy` zD#*;;?$}E;XBicwjMj_+AtdKuRkHDB)oJ;b${R2CgD(DUK)+P{ z7S}y$Ag?DP83l~CIV9D%tdRW3NF5JIKLPUcWqz(&iz+eILa2$MkB?Nm` zL*u_-xGJUvOZgCW#E18v;|wSX)2NVFuMlC7VrLnsWMfAldG3rOG;q@`XkjAxaC19F z;=K)AQ<$iDPN_jN&+=kWz8p0v=#D{b#YX=s%mNrMRU*u?5#gfh*a? zXh(O3eY^>O9rs2&us`n&y8DAJ;QCw_VGxHJ8!I@JYl?%#;#<@*FZsjtsr~6`Z~zS< zj%(d6QaXv?A_=WE!Mq&4YdGmqj!fIkh=da7ur=9WNh9Zdoxz?Y3JaAq1XFGEE)8h& zy}lJ4D5>ZmYdx}xF3#_*gD@A(C?V%j*Q1kgvfSTHS!$uf`@1(E(TH|!eqKGbx4tfI ze1sMd#aO#7_lp(md3 zf?A5zkhrf=R~BrVU|KTnR%aF*WZdQbmkC>x##PRIg?O|(04mPhJRjQl_ZtPo&DG1H zG|nO;EGf2=LEB-m4uQ!spzu6v>)yoFn`kvRVvjf{gF;EzId6A6fLx&bY_Mx!rpunp z{Et)OlaCK7OOAKhk#i>0c^g4GWA0~PQFR%rV|;| zn2bS+bt9a3+skt{bghC?0{=IXcm!5O^G^O895;OzH~5khYk^Z)z*(2=?l|*wR#?ly zOE7aQ$k~XM*8U+p&{1HtotJ-z-=YqCZ<&h`%{_aYtSVo5C!W+pS18bFbr@bLVM3>}3xz3F6n^ur=(+|IB54;q|omU$mx%ddXBit4f_u0TNhIk(6H!=UD@S z&991D_!iUQkN$;y<*9k`F0#FuS#y3bwqb3bA%Jf$vs@E;-M`gQ=-<;62$GwxJ@?Yp zU2Ca0M<+{qsJMBwuJVNX{7TzWMm@i=8iU$3oW$N+uPwb65%a7h;I$QeRqxz@l`#0V zf?=PghsBi$cY3BIpxyt60koa&#MLWl=s9n$zkFNl0RFt zY=`e~vSG^W@r)1s69DSSQSm~CJZs_67F+;>N4Npv4_|QxCMKuEDOa=uq>>1=qmbvv zEpeJ;RYokjrWu{uf(3KanR$eoHo9*~$2jlZQ92Y5gkHiav$#$#2X!8w0+V5gd+6cDq8d1+F;%zWktENCTC|akBWN2H6fbb`oHcPykT;v17^2b{R5|6Y&r8 z%e$gC0cZySr1$QY@yu~T*?(Xgc5AN>nP)M=j6nA%bcmLA=JKme`jB|I+T}EW= zk)54quQ{K!esl8&S_ID_{2nc2{&9nQ-9~&IGS3!Dm>M6wp(Mrn36IG%XRVRLU)RtB zPE;I7*5xMR+4q%zOVD3%=A+gS?vDKBrX)6Fp@{~;-ww;NDTPxQ({WUNiscFRafv21 zamr^i)@44MhyWlJ*OW&`v&z8alYP;0-0X1gyDr%`q-Tq{#1EPaCA(Py^{W9;lql|y z^vzP$zz7uvR;QQJ=z6)GNyg74D;^o>18%JfE=`gZM%FJ^8C3_8i?@b)_p5vgP9g?9Po>xD;%@quJ)~40#yi4#)xhhU`OITSv{g#6 ztLtK_MQH5vyaDyFD$&U? 
z`(S6!4M5}4vDyDFcr2n^7l7%^t(F9ZExF*nnWNh#7u0f?8{r@7QUChx9d)Z)Lush+ zfzq2$bu!FWu+m`S1N^wqW>-2AEE6}2M{JPa%6T6iphatM$=x+6Bp4dG&I@AkA>ng{ zcjR9l@Wx6&xVX$@ZU!X7SV-+1ntjC^Q=gO#;k6$Ip0gmE{O;SH>h%$=WKMb6E^VdF zfz7>+q_*%rUK5JBT`(X`Cx4o2d$`@C6w3l|b9hJr6EP3s%@7{_$I{YjwIJs2NRWJt z$=knXq(3S{-OYp#CK8>AU@iFfR1`*ypfO4I6ipWg(Ukp4Ya*^0qR#B{&VrHL5@CWK zpZS!!^YucP+!KA;qfXZexkwi)OZt&+Mrtv|;R^$d0Nl$We{}I24|$X`4BI(jX*~F2 zeY~#r zK6E+qz#QB#$c-WrI(ojmv>g4H0AF%He~3}YYsx2djtg+;{_P$Cp_cXbf&;FD!#Cvp zm;+n4D6qvzuIdd5`*%~VkRn>1i5+r!)|6-}GqRjeAY4Dn_TlkylDy>yrue`wY_b;pBzZhTdF2uE4ODD zv+L&@Ij2RP{vhFKMj2>~uUOGsnqF}2 zVmemDL&Cj`co97*J=Mwn3 zz0WJ+0blyiOK5H8MIg_Ezh+55gq`1)JdkZ~nbQ%xnPrY+r;=>)okc2#lxfCt;t0<{W4wJtN+z@3*MVALQ+ zbWC`SY(vnt*8vYq!?ox>>@x&NfUiLGDH)ytJ~X;af#=8Egy=nDN+~;!Mr4Gnw?#PF z^ZJ!Ovw#kI`;xE&V5q#5RvI^o&=#m>>%x*4VfywF<~p>Qkx2HOX}p)8G=qCij7y-D zDG!=>ptP_pk~4KFA$^=pjF(bI%+1IGhI6tvsR0IT8WNCH<5m04fKAUk-HX*E6u0(f zHP8*eSnaSaYLmrXDod5wk2SP!J>S2|RXm0>tg-Cw4*Sau1OD-~`;^cKPHhJCqftL+ zw_eig35`wEq9-R@K=d}kP|J+jw$HdGqU0{}_z=x>%1idZ$`Arl!J@;1t3zjoV2#^d zm`*m`ORd>Ul@$y#!%~FC{p)qQ4jlTJaG4pM^3o3RWmtkL@X?v;N_P%qryu&*KY#jE zwt2Zrc*nM$GOrZ=i-nE9I1h+6Sm%iQ?B2G?@LE%jMji6(srsW-7O9~4S$8(AB%}bu zwQ|yZg@n6Hyo|~TL2ynjBBt}pb|Eu+)}~?JDbWL2Rco#B3pzyz!92(?MkY^LNCS0x`3@hHvBc(U0z7*Q4BW=XTne_W&&-&ub_jNQ#T`!`{Nk~Wey5_ zRn*d59muyy<)+~K^xf3KrL3*COEW&`^z(W|yDfinoFXITJgLmN*`IS0&z$dicv9R*->Y5OHq! 
zn*>XHAbN7^w9j)M_0(x<0zH&`l(^<2wb{tLQVpGvndLRikcoi$U&`>s!}Mm$(dSGXeETO zb6>xIU8_rqh!}|d0=JheLaRySs+T0d}OP2CIA3C_bn z9;zdR8l0xbYM(tyfARe-gDj!JmoYy$+vOx7Hc}J%{FDaT%+}9tB-#%POfXzw9WCeN z8}jZJmY(nu%rwtc%qCMvJ#gXH`-HqRg7H(lL4UMjY?LppN{Qy7-8i0QROCbC`_8^Kut+2}|Z`+n0z@ zq-N_*i||oddXjgawVH3iSm*YSCnk+_U@<8qS@9}l*Gw0+BLC$xrhXp@^0xaGJvc4v zuD(ne0($gOCj+=csxnb@w^xc>?t+kbUux5dulOIAdt1VWowvl9DVnRZ9W{r|jk{@K z>f-Xefx=~lPz*6FtRS?=()Yq99jLMc%Y}PGP@TS3k4G_n>ci^1_ra>|HANmmpX-OM zHlAoi;QMiesRY??k36RW-s%>2&8Ux~-@kdE^ zJ7B7iqG!s@H}=hM4{KFVFh9@fmZJ@*!wc+@(xCwe93xBQdgCv5mCbF$dKbD}Tzcy_BGhr>8ZO9>Hwx}u6HPdh ztrp~A6Xfp8qD)-Gw707(w(UO4tF|Q#y^>FdKZPA1%Yw|3X)b=z(iTl2ggKe>_ma}W z(+g#~N%nFcXacAlcz3=5k$<NS8W7QxE7hyt=CK`K28*c=UL4X!@bOUQ_;aGqQf-YlcLyBag)IU$tjN zGSh3>5m(2DZH3u0mvnf7pqYLz#^b#ckQMz`azelo)ZU zjKhN6e-)#?QZyG3?{{_Z<-*+qF%PYH?|F=C-)lquav%dw@Uv`CY6X;}tRHaTktk0j z1t;{c+CEEYh$30<6!W@YpT@Z2QN=f0rg>oNoMbMYJ0l{&p?6xd)ilr&@k$l%mjyx7 zPkxG~VLv%1pb3+dB5HA1-?k28H0s04F#9%MlT2EevsSos9W*@ZKy`PfU+8(|v_l-` z+V{I}dwLTFYmmlY8(I3F@31PeCX(e{vYu!mP@D`qanQ&txk?rf9)gElbGe)^<1w$&$i;<+BA!4C6q zY(%O&<)3P1i%9zA^H4u(%46IfoVP6D+Z&i@S5W`0PcTXfmxu`9zHPz0#)}4FqFxr2 z!Er3*8?~-H3_GHhFL$NX+)ymT5D22ayZ3^oR5u`9S~7o8iv0FHnAdpVj3JBS6)c># zjNeu+vY)qXXc5oD6sO>h4ftH>6#A+6xDQx_kvfiCKQLft?nr zDOiNAZY##^Q_Q=OuYDQ4?X3L)#c)J(*P>GUDu7P9SWHgQ*DV_f9X1*81R(MA?FV{d zt9zQzKMh3OAZ2^>M&7z~`cFb>^L=SlJ3E{Gmz5)K9S9;~zN74d<(NHu7*=AM`RE_X zY}L0Btnh?Sc5SGFs=(jdiCZ8`3Wa8Uvq4s&EGDL;D*gJss8W-N^95ATcLvYY)*Bi( znAhQZO2@qR6$XHN}PQ_eo0j!h@V~J=6AQv37lal z&cXlo(L_b5|AzU<+`vs&R^t|J_{gDned+^B&elNpmckR`x%^l;%xVa{Is_n zL=r#GF`&fI(N%@!B(p@J+Qm(et~C!L3L#NvyfB5v=~E{pSF9{d8f6fWU2j{NSDE9Q zH})Y}don`UA@62f=%_DUGk2W@CIy5tXu0l*ex_9bcW?6N@~2Qd-Ep!fs-MpkNNUJ( zvq%sHLRXxnhlJ4NV;wB6cXDoz=2oKFxon@)bATynD~7%Q=2pIpW~RhUe%fiv^nz?* zO^FV}E<(Ra`ybj&hba-ZBh1g`0 z_@tD>iWo&z=0kH6H(e(idQaf~Zb9NK*O285htS?VTG4vrNDz%1{dYODCs-&kMOBvp zx{7@r^Zd$ca0{+afw(8EJfx?ZdTf{i8yZ~d_g(d?n)~Gh9_;1d$#I`=uc9jIY@UZl 
ze0@t|Jt0Q&tW#!+6)*F142u3?t-@2g6>Y^Wz;KJFhUtUV>e)ijW09-Ypv_?Y>y6xS@S`wg*#JMyz1y>}LDGyB+vFKbTl(Y!#mENUP zg2eN6pj7m{r1H)|*A?GnL_Nl76&mJP4a{Ph8lIB!yVkHGxiDLLbE46ol}lw5DZ82joWzy;F?C%1rqD!a^aLy8Vo}KUD6q$_2Rxr_)nr0| zlNz1nAj0&AcCu&YhE+zI;gb3q$!`5uH>4p1XJ@Oj)g(xlzd!C^xsEo z6%!GX^FCvk7-^Hfo9eu7DgKwyl!$0KRQU!;!~qcStbF@%VUjuaiG9t#S*533fDn=% zYhY>z=rJdX99Ud{n4SP%E~UI|)k!i_mf_Tif&M_>eZO-->d9`mYA$=@6y#%9E84$q z7%8TkW+@pw9eM+G{-B)ebA-{Zmq*_fp4E_AtM<@O1W6KBgHFr`u$AcZxB961AI-yD zkHSt2D5E7DM!RA!33wU(i6mi`-Pbx2ke7d{7F}NKKaB7NPp%C-iJv6H>R$I0NZP07)XF-rwo)d2_5SGFHxa zC_GN%J+e}VghcMjgTwsp?G+Z5dv}#kcBaF~yZTk8WyC^JbN*dsOQkRpgkXcl@JD2= zI(`3e4g5zI-Un5W{RbZ;!eeFrHu1g#ZFrHarRcytJktG=G}1rc8P_^81=mBr+S&HW zDphVQCj#1!x_%?+YLw;8e?#t8E!tNH`^E$5nKysWKP%#RJAs`{8Bz529l7sVpm`Q^ z^%*I%v?b7!j#m~mtMu7gC_b`;l3yCIj2P5dBW$n#w0GG+VxKcF_=X zvfm^o^Ys*1-I3Mh??L!hs-jmBac>c}rd?pfne;nM5U!B-Vc!!M&X2i37%ST~Cl1Q} z(BgiqUUBvtx&XKsb3);$`))|#%f_X1UTIoM#;(;%BTXm6=cMTY?U{KzP-yOEYRC!c z&0Ei%xP85558Is5%p#Q0A8XESma7Y*CCr>94w~f==qm^%P1LGK^d#1k*adS0($~hIWvP^`DUm=UVyLoGUkd6*8`}0HzEmYnEWc$ zR~IpH-eC$0ugw(#hu@2Apq1wt5i=>Sbu6ut8C8!t?hGX%12suqTw$k(K}IB%h_#)Y z)GtCHfvNjT)$(D03s())kjGL^m;hI4)M(sba6ErVAk>Sbd-fGSod}2HmfQggpk&o}~pC*z?$%PAmonJ+jg3z&QyOZrQTd~F6-o0!8J zWa9utmWRw}Y+1c_8L34zV_@4+YE9M|21Trfa-Q%M_M{l+z5mu-%FFjX`-C2Av{A1C zrOh?ndB6`J%6(kQh_qpK4GngP4sfw+aZnEyfSxm&)x?DrXl;tnmC$jbfoI8#8=8RB zejJJWPbx2W9wgj4#O3e7*6=ihxZW25(T$tF+Ji=Vq&pj%CQJ{@8XZ0rtF@6RQRg-k z^>PEnI(bdDyrmR%&Ece#zLL&_bN_N=)Q$6u37+ja7*z@)LWL9@c7ZQ~^D$Hwfb2!T zr>U&2u$tnyrrjHgf4}t%qG12=6jXwzbcqOCH#(+ENdGnQ8&lhn(gZSIiuV!*CJ2-3 z8(UKi~f^uW|E%-yN^e52s`6WnW6|Rw~Crq)Z3Lroyff%oG$r7_Ie#&O%mjhw zBC@{_2yatO)%Z>a;SPva-01RkA#^aP`OE-A*JkzU(U^2kB%{^Z2uioo_E&jUprg{U zI>8K=5!r#Da~4lNq(Wm&b-qRaj6XS|pD1Dsn9H^GF(qr+RZd?1CE_EL)n5}K^Mp?sQM-7$CbhQu@Pxv}(oDu~_7s$% z?QYr{aiCtDgZ+(ox;b;^;;$7`CWpVq$-A#ZUWevgJbcEHe>bFS1cfVznS^xltUn=R zW`pQ1W$c=A5cVNmq`o{~reyrmNZ*k5)vLy{wu$iXsl`^lqUiwv!Z{8R4<92J%vm- ztwla0cm^B)+V$aH~>X^Uy#2AkYYHQ1imLc>L&=}Xv~ zK*P4Gwq9Nd!e0KM&=6zP8fdgjXxNlgKA~~4Xr0jYjII6Y0`0p3 
zx>WFz$B@XA>#?3U`L9}{J%=pEfWA2J_v4BebCIOhf`~Y`+$?^%eLF}Ht+IK@2>6_z z<_npAPE@ff2pQ0I$P1}YXPu9MLd?54Z`t z$~YS+y-|4jHO%i`L;QWXxRRQ%3*WIK253yR=7n27R8&bK-Pe#fD?BzF2kH1G)6UZD zDt22{cNG(Mgfrpn^A9~d28h&ST|2T)?}zU1MkdaZ4p~ktd?>~Zo;rW%1gW9ZcD?#1 z%b}D192o`-9m$d~;tT|e3*Mkq?^JE6B>xGzu3xu@Y48j+{$-a%G=8p%k%KQgIx6}%@p8%xcX#J364Ae9l5H2S?X4dA}j*wiZo5gyczg zp0BZ$_V6%njNA5dyH8R8Y~B&DF0>=TNg3Nv9Zm&VD&m>>Efb2qA)A-hku1mpEQ|h; z4w^rhvT z^7l4awEhIHRV0MuB~J8!ejR;{EZ;Ct10rf`N~rB@kK=QOpBs zY|FTpJD&(4;-IlO%;Lao!=)3XRp?(M!ltlye_9+Cu}u$J6UW1P0iexzKTM^>+iqna z%eJ)&2N}RLq&%?vn6BmB*pQCWr)E7b!nQf#&CMIL*?wQe`Q}g!LM%WXIV;m2mBXzQ zkX>;J59nZE7EzLlh>~-C;Vn|LhSF3rh8EV*VN1E?V6$_)BB|WCKQiqi6W!z zSz8ydWqAN369bkP?fRGEFV&!_*>0DUl1&rpJ&?hZtjs!u{DTAE?8_VxlYAU@)%&o? zO{soahem(0k!~l(S{(zVph9b69hbPf?M-9V=IQO;F~Gw@LCvU;i!PIg)CfPcGp5az zFAuIMlusrvDh5Q=ry>;W!T_~&2-9B|!KV=f_1Gk|AS6oWA?^B;U-CL_;0O)CszmwKy4k*n`*KID=ET%d2EdJIiJ+<+4C^hd2s26O>o} zG;{Q+4w^WK&(P{%+A#dPC9uE9cO!4k$?p0-;g_R03|Al|O!&hpU7Op_Sb--Igczpc z&sd+-EE%|?R6?VhnEwy~U6npG*9p4ulk{E;vZ8!f%7ypmw1ZuJSjNRJvynO9!BHou zI07dfz8{y7OAUyzvT1bHyAX1bDAcA5J@}ldHE#!ep$51ghGY0%J0tM+7#@t*?`PTS zk7|;P*HzK0U^gHU8@raU(YaiNhW6OLx3_@87gv`dg;2K7YquBFP0{C{i~L83CyzWE zbAFe?$l|}NZaM7r3!>KriNs7mAk5f$HV#E8_!B6G=LUs3lZ zC+QXiTv0%`NJpRK)jLESoPRiCfIL1^X^M)A98VWVVR1(T3)7tBJ{GW(6izBl%86Y) zw7utru=dy(-?AEr{SSK{E6dD-YZyJFOws!q^N zOKg*pgub+id1oQH3JzGirw1uV55{#g;ha?8yM1~O4)HgdqYN<8+<&!Rsi9!iL8-of zc4y&&x4+i{bU0{W+vasa29bGW&AlM!`LG3zCH-)})QUs%I zIg;k-i1mHsY$J1pI)Nj7ne4#Obv0I>THw%Xm9&Y9154qzE_x!4lRSMCWNzVapKuBk zaIa5u`jZ(6Z>9*CNKH0Jn_W_6S{+gXSCsXsu63Gpd+;aELPUHLg|=VRA~#i;KSXx+bR@g=J?s#M(l7_5!6 zsyKU1ivYa2DmP~tF3Xyzx_lEi4G-Y=R1ajzBK}C52ya~Kp62tjPY$Umk9HY2G&My# zvwdG|PF`8@YqB*~m)b8mIYa%HwY^U@eV0eMMcJ4)O1;~JjjL)Ye{0y=>ig%o#uzX39e<rJAotHuSmF@5{=0MWIqhMj~jlnN8S|ACas`OiDh)agVayAa)!`} z+MGc6Lz9Fg1@hq=QHsjdts>xa7*8)qli@QO`{^gen>;wpddV*8If0UuPE;qu)RDLO zCb&y-P;`hUt`;<2bvkK}SP1&q#Kqbiw!IakJV=S4JP*jETZeyWCU48u(>C5(T8G#A zbs=OoR63z2H|To5`_CNSX_$yZL?7J$&XB)BMrLt#ll{*wmiZ2QWk)RqTYsS=d-N|8 
zW{Lv$zwTkaU}bU=vBR=&rTV`T0ShNJ zX1VrWv0A5E1T$tmLelQD|6?pn=$~5;6nR6Qp2~fa1$YwLLmYoKg$I=n{nK0!D7Rl# z=Mz=Txl^uQdG}*0jMY62jU}{@ioshAYDxcdRhE3#9qaaOkw0EcaKyeF>YGPuQr6hj zGrjIg7|b=Moe9k;3T`==d8Lz0JzD2S9)BqGZ*_x=4`!_}+lc);9fHy`InUJZ0zEAy z_0?((tIe!gTjcg2q~n+qn9C63ue2pBes7$5xSS7BER;#7v(Hd zel`4-vZBPgIEA!9X^toqkuf(VIBkE-<5BCT%+acliTEVIYtkv8!( zSiyS%VO)Y!BI~l+q=emJn;Ym#f6{UQVPzzKd_gx7%Fdw?-M2++dqaoU+eLIm}Z|6{f6|bSFeaieKO1@l?nWVnsfv+iC7GQ9)OX`XB z%EIWFuStpe6VPs)EFQhO20r8DluCL>L|Cl4zqR}v1CR>#$q#Ow{9%`8McuQzQ-wo< z-Tq@jnsfA1Ec=%-k{8&?0cVycutteHqzz_pTsrMH?`})93x0Z;> zX*C_5-?dKV58QL5O{xecEGz%8vIhC<^nc$`FPLVjZ)P0I_H>oUHb1w-Ef=&e2+a6= zrskZVdn_*fNQyM}p7-HB>6EebV70)dB;n*$gxCGgvIv5b3pKlqKB3;X=1>I!SJY#X z-7jZTkW63y#7k1x=+rpZsV>e?8<@L}wK*o}{chU<{W+?8}R=sOa(4ezQp; zyyJY9GwFbKL8$mWwR(&3x*ocQk(53?bOrc#Wr}Z916_<|3wcDoO)ky#I}(Xed^gyw zEb7Eo<*73AL4u`*ErTSawX(bz6^F~)uzt@Y$Ln5L99hR%iU6~Aq-$y2(}?jTMPFmW z=D#nhvF`g7zH~773D8p|+hcXBsb8vybk?I~9Jplm^Ym!YXg?-r)6r`{H66xT#W zM*B(tSK4OJ!d=nJs4GC{Ap_~h>6oX&V#ZCp=6tfv^pU1I9+1#}dh>Pa z1=+uwwiWu!C@PbC_+O47T7pdf4!A>#*nJ>3@D`T54d${@dHD42c2&H*_=-_ova7bo zKlfmN5Wo0aCZC;V@$TH7&xb)8`vZmrnu{vP0X{#fT>9R2n|^cb6|P@J^3XbEpMl| z@H_gBRYYoI6p!Nr71oadh4_SoqL2T~i$O%nlG=aX#T3PVAH(_)`u{j$+5d!gP&DeZ zfWM*hAw-`0hmw4>hBf^x@7 z%Vj1^zNzWkvFX;`qhcm!fA^CtA6mE_1=S+>Es%ChfGT9lKe}jHNK}AN7?34N>*(oeLq3|{JmnjwxW3ZH4XyS-`nUNcrq>;Po*{8wNS5XJ?)slBG&d4 z94~NyLt-GIzjtk0-}>loTlf)2>@}teh;8$-1C1T2W|8xNrfw?)^G9I3H(%u`?g3Jf zkj%pz3Ql#>=@85C&vwr6bGNy0Y-D<1X0E*+^ieUzXO2I7vG>P6-^GSN*yY33?e!2K zY^O_Gl>_kQJ63H{iXQnJ;{_b=2PFD2piOmg-LFNtla zo}bzYNwo?Ke)#6AJ7VSS?JN=~8y8r~Bm1&|V0G1T zo0xginMl-gY2p=`vmpVbtFh*5Rcg_sC0_?R+MLZx+@*nC)qzyE(FofjXRQv>OWeas zU$qD(76S}O=o!0D==c3Y1RR9BTyM`z^8KO5EJ%}Vn3*q+IBGA<{}eiUi=`4YsP0U# z2D7)hq8eqWS4zv}>AlyR`ba^2T_USso6{(9Bpj+c>-S|iIs@L1ImRYh7BC-AH76Rp zG8R6j!Igp~Z|}EmG048oklY*_6u>`Q>bsrY*G-fpxK)2-f9*K4qF9LzJRWhXQ;TlS z8RT-iACQNjUvE}gdXq#tyJ8)gWfHMzLOsH2J02x_mOf{!&E365JUg0X2HHvvES*|K z|M*b{8atV1MX?iP8tD8h;OX$vx?sC1P8zgGcFAggZQ_Stkv!bLn%EHazHWP#KqPyYbpuw^lfs 
z;cn1~G;Q1uC8GnftA1q}kMO#+p}Mv4O96#l;CYo=td{=fK#D}{XkC*MgZAhX@{@w_ z$x49%<#?X-zBp#Ff6KuNLUGDhaJ7y-Lj&bJ! zfgIzl=7N(wO3BThAnx)ne2F;H%OmAeFFQ)J-icp>#}$w3VfoTXKr=g-^$gQ3mcc!jKch&4lJ3QxlEBJNt z5suJZH{!agn#79=B=vR`$Hi4&o#9G{v5NJ#$-XBIubjZNMC;+uS~@e3fI~1MgE=ZM z`VnmtjE+u6Fm(Og*rBVJ0tz>f!W8fZ43g#8Y>`h7#9K3@9)naq3H-{@!xC@Q6wN_o zjWW=a?&v601bmcrvEPUdj76;6U+>3Dj{U~4=vAxgqH_Z0iTq4we)NhAwlumwsYFch93ZrJF$&$_hzU8eV}iB5kZU@CDW{x-~3Loz-T zrtpBYOFSO)dOx%-^3aD(ak5C@Sh97dQ`mB!0VzW%kPZqeA`}!rg|Ko|E|1oLZZV=D zk36(kd(U)7ZeDzW&JBq9l zls917!qCYfp%I@tmIi`jI6yqyoOF~+mYwp<3Bp#^EdVw@!rC)8P{#{5)5{VkrLG$G zMnYM2H1F##!z>AtvyE3-VkM~>rr%rgAc8=y;dI4&4{p@fvs2<3sSf>uar?l=ZFTk z27p-uJVEcs!$mdbwt@mPAsD*uFoW%EAL<>z!ZRLRt)A6uaGBYkS^Fr%?hV8D-`n(P zGe_WO698v&#e^pYXG8wlZ!D{9+yQ31T8DmRm`}u^=Er^D1=$-ekL(=IZV}3)4zRb^ z)bxSWE>EbB0C3F^>N(wWlJiX0;C24OTkk;x@;x@t;3W+645j0Qbv2@}}_aFM3)tN@fz~J`1Um$Kb+P)gt=Zmx6(C z*`led4m}vg`E`Dre&#du?F??JEdjkn=imuMcQOan$_W;0Z~&bs1A_-+STPp^55-CE zN+bDqZ2F0aAH-Kx-Eq^DsfnfA+~~oV2cd77oqrI50{7$07X28GpPQ+-n3cl_Bcmd_ zdoLzmRr5N-8^PdTxil-=3;ebYUiV}a4DJAbTgrlm**=NZXvvF7>` ze!il6Q&Ux^E%l3QWiggX+cI4va|dJa_M?a$k8P4t?Ar6d$Cpb+RQ7|-mwg;Ng@RDd zmD^OcHy0u4-t|(BUUj5n-PonE_I%a6pGXLsr04W+oQv|hUrS|B+rUd_g0ry7w_|V#8Md+hkf)67KQJg66-g4^EAzM<4o@= z2=FTlNNDrFzm^t;wq+!hEtVEQnjz!?51B_FR8>3&=v7HcDx}X43~KPBdfo(vU~cMN zx!j-QYiN#!nL}%1k~!Pox@Y(FYs8&;S_ab5Px@^U|g4AgEi2N2DNb@i6KP<%5gdN z$!_C>CMN`8z;3*eIPbHgXloX3+2QIw&GCRkll222=(zg`57t5!p?S|@AXoP0bjjrM z#eTW0?h62@F@A<8N4E3B3~1uA2TYN);_r=AxRL%0>r)T*>wLBEba-!?Rr~D2v>Y!# z&iMUUr%-+hr>#}D>jklR-Y-jB5Jx>j#hH#4vvMuVPQnQ)?7VRWJn(UsbSour=RhSe7M zHPcbPm{H7^Pkch7F)K)1%dLs?e~XkD8GIwNX?pI?<}sMU4FS(i% zU8PzUlu?jcCI(g;pQrcd;P2`wG6P>5_3y4dnWW(6&Qvh)ptHMueTuf$)KB${mHx6e zr$)pyl@6%qk50OG-|qoiYzHQ#8N%_!E%1?5vqE5!gQRq$ppbSu+7$TP9(QFqDxPqZ zI-isfn~Dr!hOzLU6+Bgf?qD6x;ZLQP2Wz|wisSIh0`4R% zicZxma6E11HGo;6IA78L085Q~TfCp8GyFL7Y_pCGIAp&MaPE@!`cXqj?nB-P-{XD) z>+@))OOyyaX%|{FdMJ_=e_B(dy`pMf8j|YBFyr1g#`cY^k3JRFwsNWdT5Dpe^xB3( 
zolS(xf|zEZsWhlT{D7QpV|Z2NzM-n}ud9bfcaNHr8SMNNL^?b~8mZqF5P;WzU^Rb-wuh$*@!EBOIlccyHP#!kPfdcorI13P8 z#vuJ0(J<;y{J>IvVXqj{abvrxF!Xrd>Mb#Lv1 z{jBP!4^ESs9d_Bt_$hG(j#gijtDsN(;-CmmzJYv0*;w_-yBs3g2psNkb3?;p4gC1w z>#}m=W$s2Di!U=E);XIrUz{< zC)*Xn3g4*Ge+|PHIW>jQn544MRf3e|`|4)35-P)3s3%4>#LBy!sa!d+4$z{JqfqhfU8;Vdgiy&Bf7d zydUFrigV(XB|C9y<^_>{MjG;UlaR3LeaS+BpQ>s^=`~07j_ntM9=e*BAEndCYScQM zc22L)Ua}7HGm~v7n?WVH9Wr0){rnulH{baLd#Cy4_3%Vf5?+!v@=7Ex7C0rBt8vo7 zyXDj?UP{@P;IDIadpi7p#HU1p4t`jm)YJlxh{`?4|0jUy`ZzqKz+QaK7r zdZI)_l;psi+UG~WM#OGD1zd=mBa35T0P7;onK1mj0~fRD(1AV;QM0x&`QOw-pFXX| zre~Qp3y<;pit?ag=14;vW=Bjx&s5YSa;Xm+BfN^*_aK6ypW^;O?BkIeAsTqlBlN#U zI+Kh}xO8$Ulne=aquS~TI_r_(iNL<;hk}NDK^{4(d%0RdF!-phD?F66VjW)xy{?YS z=_$eKkH{Wu=8h=SNUPR=X2K*5f7G?DAO9&_(LpYQmA`BoP4blxm*U&Hs zNS7iE-3%#6N{4{bAw3{5q|%+z9fL#H#{YiKbI#S-&;G98@4Z+TYrXLc2YSL+WR=NU zD?G#4%^`RFy)O?zAHBVuXUaSCzxpjbqBufOc7MSq3b-i_A6LjYRCOT|Fl&8D75<5j zKRl5oygFOog&=EFSx) zk#7ugZo^B_wphXH$q0xF8uGtd=>ns-t(tdi35A=9wM%(467DjUacfKt_Qb!4I<*?y z5rPJUSKi{)elwB3M67){ePTN&9I+za9qqBdHA<(GaEJEyzmnVRZzg=Y8%E+}yYj0o z#ET)xNH_pS=%F~LXeLV*z?sS+|FGgv-;!;b5I`_m-*+{61- z?DmTo5^k{ONN;KTI|nnLV$R{;{Z@#^@hdv|QuWq)xtSP$s5hNLfu-l}<3TQ28atf` zo`rahgv^){F~PkV?XQ0>k(m3HQ^zy3m}TW+Fr#RB0H1e{0p0N{!fKusDg4bK1m|0p zV`lc6YU8rhH3o4NI}HLcgKe+dMGTl`f1`^KU-Yy=iEvodA1@pH<}(WsPSrOgIZ7Rd zN=TjR@zM<04Vz+mN}6CuK`^ZOWmb(LY4W58N8sIJyxGXZIKJWVPJaohfS9sL_M{Q_*uZ&N0Ox(S)&$ zZ_~k$I;{?kO)rwZ_;fmC09t1O9a1ExHPS=i34IV}%%ScoA_<0Ecw*pv^OOHv^gEW> zD#yV8oSh8(RtyU+=-eg|b?eZ2qC*f@&-L|lnDMJP%V`Drd0^0MnCT0$-)hSrv>)jp znG*nL>a4|3R6o${;p)6wpsAU2#k|(a^S8~64ST~_6QRJHpSFcH59?}puK6(vwxhJY zpDY;{#r-$1M2FPHSu-r;ad|1+HTf_lho2=Xou7FQx^;zdS8dYO&6M5im9OhOhFaC8 zFUWZEHczS|%g$od2a|ujm&c_fgr&M|+=N2aaClloU$T9|l{$2xUB1`jXq)DB=+@Vq zphTyY2tLQwtGIUt7}s+Z65+oinSFAmr~HfMd4jj(RX$Qk(yjeTmX~hrX`>pP=%$Pm zwtWCFjN|%TllS*W0rNHc3avqhyoGkez;1`u;P(`g_xcq6o?3wDd#X+a-dH&(8IHgy zzI_vXQZ@vjqF6+;dYn%)Gu@Md4@AUpQyp@ki*5G(fb}oZ`s}xU&j~|K8MV~QA$1Az zk=k)4t~U-1{E%5(Hxd@Jj5#M@R13ookQHFa+5gUS#$01yj`k-7Bm0bTK`#5EJJ+=i 
zu2Gt&=OcoOn2~5VO|bO1UtrMOhICn}wOM#@a>kAU60i^Ecb!jGYxGwq#Od^*kkebZ zr=sLoI}n#!S!sE;Wj?X~s?e+V2N~gSi<~Xv?e~!0h{-_d#kHcLDxT-&9poU@guA07 zhcxZoXO(GFd*K*MmFR(cSHv<8EONQ+SyVfMwA05JIguao6re84D!+JjuI@e(79N;e zZ`5nP^MWD&<9T#1R!QtfeN|2z?)J(8_`$pulDXSE(UvL0?{t{U43ScCIv}a}Eq}S2 z)kFq|)Euo{KBg(!vDZ$lg!dvv7Y?s)&`Z^jE|eVm_#1)%1=`1XASH0v(F5+= zJNy;I7PXlB*0-Qw+;Gp$CgfkGx~(*Hgx-Eo*pWG%{1N}r)jN9FhyA@#K14lLbxzA@ zG@Wv#)$Iue@rbI>je{cd<@dala{->rshwwLCRu!RF>7${6nyE}xQrte*Y^11VKqhW ziYp_tPd>Vi^QHMg&JmD)&)s8 zDR>@|0FN@!B zvATj}Bb6=LZ*ZAdx?MsJzDt&@Hk`j~jAk(NF=jTCkh1RcC1;Dt@X>f$r&~!&aFd8~ z_7RTxej%e+wwVwH$Utq&nq>>!nbR;veJCiI2({)0mXU1lQJ8{)E?CM2*~A;>_c%@&PI=FZp&O>JmK}Cr|_A^w;MOUMu4CKSrr+efVnrIuRB?Y0PYx zayoa{C@~#Lo(elKcMWi53QT!!CD97u9{<|Id~G{^Pi7)52w$PrfqDYyX{5m>p#j< zwtR?qaZCywMV}3gqkuXRBnUy`?0#HusRdxccJVE=JY?ssywgoObVnb!UVb3*+E>X_ zn70v%4V(NxWut)_$Pw@6Xs_y%kpykY;>}W{ce`Wv2X4RbRfFlItq%gMfoIb9F;fn{Fnh}#h@dIOT=S)ZVQ6i-qJy#PxU zzXA^*cgrR8$fRBn#Z&2)kB&lsNR!czvV?SI^xY8=^y>hgN(t6P3Od3K??tXjousY- z8inD)gr&Z=MoX!->jK!#U2CIp-k;NC(5tM?*h$wZ_AnLu15qOHTS741JB#|Xdgw2< z%F_7ffy)SZ8c4G>$k5Z%3HCa z*5mq0>o|l)qp@6Ne}2gj2n{{>_+nc+7M-$jHodo8K|wh4SEaIqk`s_ubl*{->5wv> zP8hxO#uGzSY_!Iu4I`(2lG4CErOc9Ms3KixZzi_MYye5nf6MksnsT|w#Y{q)X3YQ|FK(z^@WaaFzzUB#73>z z;c@LD6vn5nPc_e|qqR|K&oP(uyFThAB@za`_O6KZmCs?VDj=?1VrqT{HLZD+lGQI7 zxp6nUSVA?OA?KcW;^I%S*wM7rGtz_<(EafuuvY^8RgMFkDL2^MCmsB4HGqL2x5%%n z$8lX-NN&syZ1y$QJK@lwpWc8TlT``f4Z(1*3D*Ewwl zNJqYO2|adh@&oqL9(i4~p8V_7Dk<(gasJb)eWv(7d&&QCarwUvSmom&MfpW`YxpuB zmBJZZKON)v%d1ZYB|SRI0Oa7A7gzstVSt(7Bu*kba~<(s+;S#;ZCdG7b4w4JA9h?* zRqj>Ru82*?J{VThyep*<;EFl|U#zAa{-y|Wdy+25dX^Ae{=z2(*X@kE^Dq@dgJx3t9+MSj z>pMI753*#a)dMCHNzEY-St@B>&U2nAUYYHc^+N1^4pvlibwy>e!QjI`rgksZx-j3K z@g3hL{DgJMP=oTW8SpKCTRv+jh99X5EL6&K1xSXzl+;a7a@kmTPx(|9+Ly<4kJfpj zL<2zXw^2oD2Xb8lCMLHV_YyFOudD@aSIZtZKNW4Jrd6tYe{<4?@qPOyDL%^bPKuy8 zZ}retW(FvT3UMP&)aEQRE4CWp!LD3N4HVj=1Ak#a);SZC?|xW7zspryq07~7S#dPF z$IPu^4)igb;B~Zl#8>;C{{@ZGH2=+}hV(v^B0BwkvI~6vxl!X*Jk+!~>~G7ri-Xed z*WQbd&h{SUu)I7c}TvM*lSlMof@S&u8MU9sTExuHU>*d 
z_Em}Y96?u$i)jyg5fTvcp4!2<+a$M4rq3eIy5Qnuke4?lPsO7PqIu2b{!X=8Z{nSw z&DKnB>g15?vt;Xv@6m_O&wlYz(l;Jyx_ zBIAp$9CFujaZ5m4jHN}9rj%e&jNtLu^CuU#KB7g$RmTt1@tf7KXZ4i?s|uSwinIf= zpE5TsECgNm7N0;S-a(;Hk(q~5%M7mLcZUK*)8IUXC&Dyyi=706{F$OL!9L%@{a+AY z#Y9_b>Wq}%o0oF=x_sI2pKlQuC;{vmgd=>vy;*ga5by6W)m0citaF>LgzZ-`mR0*NpbgUl5Vxq{r&6JT<}Wk}>z`^O;-ROrXYU~!?o)qB zEGI&6zeBCtXEJ5)8}LDA}b$EkSa zuFv-}6s^rvxCO%$!fE~*3+qQ0u=b>g9-c3VUFsCB`fEFXZ}^9*Ri7|JsG*0dM;oZi z$M|VCUZgmDXi&Jl!l-9`dhz=sGmW|P^ZwK}f%y+*m6*VSq(yLP5^#GiH$Sn)1hPSU zhKY?Y2gcdc$E`{LLeCX3Z>A;d^tl$#Ahq%#(oKf4gCa`Oin0v1#R{%_FUU-Iv$CaU zq78HkRyB1yI>hUKKF^k4Y@bnZGb9w^IZUF<&F4~+_bi{tOIAcht6I^h10kNIN_)1<1ETOU;4YiKXP^sK;Ym zjk;4Q$_G-^U;A7Iij5pLJUbT0!ibsm_+%C9<=VbZB(=hE7eRicJ_q!ylt?&*R+nb~ zx2f2B{zepCl4k?&{mo;4UpK%U6He5K!<%|oE$%LGVxpGwvfmrN&JkhsLqlG>CfKP( zS1un^f2c?1u$JvrsMUcWiEtadl_vZ`jtuM+WKk0c^G%F>gm22rm6(oIHowd0kRsFOy)6ONmuXjLXnTRP!+Q-l(zMqtdezOb z{j^5bs@YmUz}_7&Wuv}Z(H{C|`gKaQuC-))a?%D-YV=9WeT@prxHi#&{x@TdNGeCK4qMJYYkK05z!prRGY0nxYCan$Z zAdOEhP&jq=TZ*3=#REx<(o5rY8dPQVzVB@M{eFt8zAjyLI|(8#0$wnUXN!k+)~I=Z zv*^!%hA3cjwlL9}nngHAUg1!u(D`_`usQbN52Yj?v_lLT3BLl7mtuL{_=>mEoso3m0hX zEVdJ@_zv^o^3u*{dHGVTQ5lQ5(q*uh*;2(|@mK0PM_J>Y zjMPc9dxF&iks{OHjBM0GiGQXByUGJV3hqW3+5XS2mb~BXFOJ5q2Tl=uy);<&KLg+$ zQtcpLxTZiZ&vmyA0Evh&DS9HsG1&o)HuHx?flUne-LuxEB*tihN#Sp-gnnvdBDxs>%#e|;!i zdp$5sT-6B|69b=7B0DIh7kmw{{9zh2(e)fkDz&IJ4#L*cH(iJL1+0TUFR=iQpRYtU zPr!r&)R@F4Ef5;9ZUxYWRp1V6v#xEdtm->~=)AB;GZ9MNN(r0FIx=__6L z-r9KIz!UamwIeDecaaN$2rT zrch?NN|~~Geuo-(dwZT0`Vj%+xVFZDQ@(vmR3;1PCl)OcXxx$H=a5jsv!!O|wcA)> z5%fEM!D*QWf7~H@Tjn2TUCr3KF}j{YWAk5LcmCX#p;Q(6Rs|WA?jpMB-Qfk(*>o)w zSm7b^-FuqiEXN~iHi&@ruWyr=?PZh`LOfWzd49wk6h{CY4sL;-)_1uy&RGr?EaE>` zKSjMvweNd-xh$4yXSl>JPRTi2(rIyK9piD3t_5bWVQMt*)ra{M;Lbv7Wn?=vv4uv& z#H!2`KP1MM4#%ycM{#YEYJ4BW!n!)ShZ{ypE8(@QPq!&XsvRSO1x}2u@SI%4%MkJT>&Y^h~ zXP;GaH8f~B*#B_o?`+HAkRDI>LR!FzqY&&Tw`3TnY>7$Zj){cIbaM}l^2ayMu8{s#!1#W!2cl+%@ge{C4u~8L!)JEEK zpn=N+j|#jZ{;M9lGD|Jfcsp#l|2y8&1Xnz73Ac@Rro-hfQ;M52R4-yNShH5yw)N4? 
z*(Jti9BL^}HJ_u{iMVlTF76R_lj2n1$(5cXa8MaszbKDGy+GeXPZCy@k(~%p-EZZC zdTm@*W!!yDgGXP9DX~|%Zm`iv2az$IgTq0gRW=ckk<= zP<8I_Pf}8|Y%M19NuB+lg}wpZgI87`q-vD$G!~-*%V5(x8`AsBh(`R~UPYm??-CRBY(3#@qAGz!t}7opMXAFlYKbs!_W2LQ%FN zcRIeFpRIPdWe32skTV+G}MzV-he>EhGl;qc6-ljX!-gbe`M%T#qEiX+CbB3CzfDO1+#tWLv zP;s8r+%RSI;nIy%FVn~GL(;=ocpJBfD_eTq>b$v9sawmeaIEVx1*BR#N&c>?zxe|- z$Jbjc)ej)`T=&Mq4MQ)2zqL-c*Rr}*IV3#79DjAiAb92}wQhNpQ+Zrx8>{=R4?x6_ z80|ZLihFmVKsww;_S5Ri$Lsk{CDdE;k#G3~!;a%0%7f6>2oQ548_QR zEYC>b1QF6Qd3v88Qi;MT?f3Nd>z&pwODCxShLAP|Y5K157TP`(wbrkgN*mho<3FF7 z2s6h*)4#5^B?g=7;n(ZTBKO+gR#tZWd)z!^#&v=TdQ!z6JnEv_n((R03)6}N=@y53 z@~wgUHtrI}7hCKu)gCK~9%jgtM zUPM&4a7`gLg;SM~ayjD0$`7M)sa{BVOk(qBjoY-sYReO6yeMO)K}*q%>3G3G)SkT% z2{RSPORJlpZ*6`ZXcT}rb=C#h35eQjb7hJz{hVPc+moW5pW1^wC=X>Y?kLv3(0%)` zlp24DSdwY_*ZF^$O$smnsc3m@s{r*a+NH*f5w_P14_bXi0S7c)6&+0`dm-`EFOSf3BSbh zz`OQGgpU`=^IA2&YY!U`7d`Xfd8zq?`%bB=cmepYFW1;`Xirjh4hj>q6=}Sw5Y)#= z`bYgPy~&`A3>kbCou7-`SJ}9Fc4&H|JiKl$4NHktRS>=i;HJNCq5k*kB{2Lq^Qe#7 zYV$ue3P;5Mtx+CQ!3$-T{(G=N{Qr=Y{{8u{i245qVEnwt+mtKnE9>h+U_5-v^6GLG IG8V!A1wT>sz5oCK literal 0 HcmV?d00001 diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml new file mode 100644 index 00000000000..a35993435ac --- /dev/null +++ b/docker-compose.dev.yml @@ -0,0 +1,105 @@ +# mkdir -p volumes/{lib,pecan,portainer,postgres,rabbitmq,traefik} +# +# docker-compose -f docker-compose.yml -f docker-compose.dev.yml + +version: '3.2' + +services: + + # web application. 
This expects the config.php to be copied from docker/web + # cp docker/web/config.docker.php web/config.php + #web: + # volumes: + # - 'pecan_web:/var/www/html/pecan' + + # executor can compile the code + executor: + volumes: + - 'pecan_home:/pecan/' + - 'pecan_lib:/usr/local/lib/R/site-library/' + + # use same for R development in rstudio + rstudio: + volumes: + - 'pecan_home:/pecan/' + - 'pecan_home:/home/carya/pecan/' + - 'pecan_lib:/usr/local/lib/R/site-library/' + + # use following as template for other models + # this can be used if you are changng the code for a model in PEcAN + sipnet: + volumes: + - 'pecan_lib:/usr/local/lib/R/site-library/' + + # this will open postgres to the hostcompute + #postgres: + # ports: + # - '5432:5432' + + # Allow to see all docker containers running, restart and see log files. + #portainer: + # image: portainer/portainer:latest + # command: + # - --admin-password=${PORTAINER_PASSWORD:-} + # - --host=unix:///var/run/docker.sock + # restart: unless-stopped + # networks: + # - pecan + # labels: + # - "traefik.enable=true" + # - "traefik.backend=portainer" + # - "traefik.frontend.rule=${TRAEFIK_FRONTEND_RULE:-}PathPrefixStrip: /portainer" + # - "traefik.website.frontend.whiteList.sourceRange=${TRAEFIK_IPFILTER:-172.16.0.0/12}" + # volumes: + # - /var/run/docker.sock:/var/run/docker.sock + # - portainer:/data + +# ----------------------------------------------------------------------- +# Theser are the volumes mounted into the containers. For speed reasons +# it is best to use docker native containers (less important on Linux). +# The pecan_home and pecan_web are important since this allows us to +# share the PEcAn source code from local machine to docker containers. +# Volumes are placed outside of the PEcAn source tree to allow for +# optimized caching of the changed files. 
+# ----------------------------------------------------------------------- +volumes: + pecan_home: + driver_opts: + type: none + device: '${PWD}' + o: bind + pecan_web: + driver_opts: + type: none + device: '${PWD}/web/' + o: bind + pecan_lib: + # driver_opts: + # type: none + # device: '${HOME}/volumes/pecan/lib' + # o: bind + #pecan: + # driver_opts: + # type: none + # device: '${HOME}/volumes/pecan/pecan' + # o: bind + #traefik: + # driver_opts: + # type: none + # device: '${HOME}/volumes/pecan/traefik' + # o: bind + #postgres: + # driver_opts: + # type: none + # device: '${HOME}/volumes/pecan/postgres' + # o: bind + #rabbitmq: + # driver_opts: + # type: none + # device: '${HOME}/volumes/pecan/rabbitmq' + # o: bind + portainer: + # driver_opts: + # type: none + # device: '${HOME}/volumes/pecan/portainer' + # o: bind diff --git a/docker-compose.yml b/docker-compose.yml index e0f5661b0fb..64843b53e11 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,4 @@ -version: "3" +version: "3.2" services: @@ -43,24 +43,6 @@ services: - /var/run/docker.sock:/var/run/docker.sock:ro - traefik:/config - # Allow to see all docker containers running, restart and see log files. - portainer: - image: portainer/portainer:latest - command: - - --admin-password=${PORTAINER_PASSWORD:-} - - --host=unix:///var/run/docker.sock - restart: unless-stopped - networks: - - pecan - labels: - - "traefik.enable=true" - - "traefik.backend=portainer" - - "traefik.frontend.rule=${TRAEFIK_FRONTEND_RULE:-}PathPrefixStrip: /portainer" - - "traefik.website.frontend.whiteList.sourceRange=${TRAEFIK_IPFILTER:-172.16.0.0/12}" - volumes: - - /var/run/docker.sock:/var/run/docker.sock - - portainer:/data - # ---------------------------------------------------------------------- # Access to the files generated and used by PEcAn, both through a # web interface (minio) as well using the thredds server. 
@@ -163,6 +145,8 @@ services: image: pecan/rstudio-nginx:${PECAN_VERSION:-latest} networks: - pecan + depends_on: + - rstudio labels: - "traefik.enable=true" - "traefik.backend=rstudio" @@ -176,6 +160,9 @@ services: networks: - pecan environment: + - RABBITMQ_URI=${RABBITMQ_URI:-amqp://guest:guest@rabbitmq/%2F} + - FQDN=${PECAN_FQDN:-docker} + - NAME=${PECAN_NAME:-docker} - USER=${PECAN_RSTUDIO_USER:-carya} - PASSWORD=${PECAN_RSTUDIO_PASS:-illinois} entrypoint: /init @@ -310,9 +297,9 @@ services: volumes: - pecan:/data -# ---------------------------------------------------------------------- -# Shiny Apps -# ---------------------------------------------------------------------- + # ---------------------------------------------------------------------- + # Shiny Apps + # ---------------------------------------------------------------------- # PEcAn DB Sync visualization dbsync: image: pecan/shiny-dbsync:${PECAN_VERSION:-latest} @@ -327,6 +314,27 @@ services: - "traefik.port=3838" - "traefik.frontend.rule=${TRAEFIK_FRONTEND_RULE:-}PathPrefixStrip:/dbsync/" + + # ---------------------------------------------------------------------- + # PEcAn API + # ---------------------------------------------------------------------- + api: + image: pecan/api:${PECAN_VERSION:-latest} + networks: + - pecan + environment: + - PECAN_VERSION=${PECAN_VERSION:-1.7.0} + - PECAN_GIT_BRANCH=${PECAN_GIT_BRANCH:-develop} + - PECAN_GIT_CHECKSUM=${PECAN_GIT_CHECKSUM:-unknown} + - PECAN_GIT_DATE=${PECAN_GIT_DATE:-unknown} + - PGHOST=${PGHOST:-postgres} + - HOST_ONLY=${HOST_ONLY:-FALSE} + - AUTH_REQ=${AUTH_REQ:-TRUE} + labels: + - "traefik.enable=true" + - "traefik.frontend.rule=${TRAEFIK_FRONTEND_RULE:-}PathPrefix:/" + - "traefik.backend=api" + # ---------------------------------------------------------------------- # Name of network to be used by all containers # ---------------------------------------------------------------------- diff --git a/docker.sh b/docker.sh index 
3f05e57c524..372093d7279 100755 --- a/docker.sh +++ b/docker.sh @@ -61,7 +61,7 @@ IMAGE_VERSION or the option -i. To run the script in debug mode without actually building any images you can use the environment variable DEBUG or option -d. -By default the docker.sh process will try and use a prebuild dependency +By default the docker.sh process will try and use a prebuilt dependency image since this image takes a long time to build. To force this image to be build use the DEPEND="build" environment flag, or use option -f. @@ -103,8 +103,8 @@ echo "# test this build you can use:" echo "# PECAN_VERSION='${IMAGE_VERSION}' docker-compose up" echo "#" echo "# The docker image for dependencies takes a long time to build. You" -echo "# can use a prebuild version (default) or force a new versin to be" -echo "# build locally using: DEPEND=build $0" +echo "# can use a prebuilt version (default) or force a new version to be" +echo "# built locally using: DEPEND=build $0" echo "# ----------------------------------------------------------------------" # not building dependencies image, following command will build this @@ -188,7 +188,7 @@ for version in 0.95; do done # build ed2 -for version in git 2.2.0; do +for version in 2.2.0; do ${DEBUG} docker build \ --tag pecan/model-ed2-${version}:${IMAGE_VERSION} \ --build-arg MODEL_VERSION="${version}" \ @@ -214,3 +214,19 @@ for version in git r136; do --build-arg IMAGE_VERSION="${IMAGE_VERSION}" \ models/sipnet done + +# -------------------------------------------------------------------------------- +# PEcAn Apps +# -------------------------------------------------------------------------------- + +# build API +for x in api; do + ${DEBUG} docker build \ + --tag pecan/$x:${IMAGE_VERSION} \ + --build-arg IMAGE_VERSION="${IMAGE_VERSION}" \ + --build-arg PECAN_VERSION="${VERSION}" \ + --build-arg PECAN_GIT_BRANCH="${PECAN_GIT_BRANCH}" \ + --build-arg PECAN_GIT_CHECKSUM="${PECAN_GIT_CHECKSUM}" \ + --build-arg 
PECAN_GIT_DATE="${PECAN_GIT_DATE}" \ + apps/$x/ +done diff --git a/docker/depends/Dockerfile b/docker/depends/Dockerfile index 2e419aab5ff..fc309ad12d7 100644 --- a/docker/depends/Dockerfile +++ b/docker/depends/Dockerfile @@ -10,7 +10,7 @@ MAINTAINER Rob Kooper # UPDATE GIT # This is needed for stretch and github actions # ---------------------------------------------------------------------- -RUN if [ "$(lsb_release -s -c)" == "stretch" ]; then \ +RUN if [ "$(lsb_release -s -c)" = "stretch" ]; then \ echo 'deb http://deb.debian.org/debian stretch-backports main' >> /etc/apt/sources.list \ && apt-get update \ && apt-get -t stretch-backports upgrade -y git \ diff --git a/docker/depends/pecan.depends b/docker/depends/pecan.depends index cd041decf6e..6228573ce78 100644 --- a/docker/depends/pecan.depends +++ b/docker/depends/pecan.depends @@ -111,8 +111,8 @@ install2.r -e -s -l "${RLIB}" -n -1\ tictoc \ tidyr \ tidyverse \ - tmvtnorm \ tools \ + TruncatedNormal \ truncnorm \ udunits2 \ utils \ diff --git a/docker/env.example b/docker/env.example index e2083fc8e86..ee0ca6d8ec1 100644 --- a/docker/env.example +++ b/docker/env.example @@ -6,7 +6,7 @@ # ---------------------------------------------------------------------- # project name (-p flag for docker-compose) -#COMPOSE_PROJECT_NAME=dev +#COMPOSE_PROJECT_NAME=pecan # ---------------------------------------------------------------------- # TRAEFIK CONFIGURATION diff --git a/docker/executor/Dockerfile b/docker/executor/Dockerfile index 4b68c7ba897..0cf5622e487 100644 --- a/docker/executor/Dockerfile +++ b/docker/executor/Dockerfile @@ -20,7 +20,7 @@ WORKDIR /work # variables to store in docker image ENV RABBITMQ_URI="amqp://guest:guest@rabbitmq/%2F" \ RABBITMQ_QUEUE="pecan" \ - APPLICATION="R CMD BATCH workflow.R" + APPLICATION="workflow" # actual application that will be executed COPY executor.py sender.py /work/ diff --git a/docker/executor/executor.py b/docker/executor/executor.py index b79e089e277..a432df87ecb 
100644 --- a/docker/executor/executor.py +++ b/docker/executor/executor.py @@ -55,6 +55,14 @@ def runfunc(self): application = "R CMD BATCH workflow.R" elif custom_application is not None: application = custom_application + elif default_application == "workflow": + application = "R CMD BATCH" + if jbody.get("continue") == True: + application = application + " --continue workflow.R workflow2.Rout"; + else: + if jbody.get("modeledit") == True: + application = application + " --advanced" + application = application + " workflow.R workflow.Rout"; else: logging.info("Running default command: %s" % default_application) application = default_application diff --git a/models/biocro/R/call_biocro.R b/models/biocro/R/call_biocro.R index 17f029b32c3..226bcbca1f8 100644 --- a/models/biocro/R/call_biocro.R +++ b/models/biocro/R/call_biocro.R @@ -10,7 +10,7 @@ call_biocro_0.9 <- function(WetDat, genus, year_in_run, # Check that all variables are present in the expected order -- # BioGro < 1.0 accesses weather vars by position and DOES NOT check headers. expected_cols <- c("year", "doy", "hour", "[Ss]olar", "Temp", "RH", "WS|windspeed", "precip") - if(!all(mapply(grepl, expected_cols, colnames(WetDat)))){ + if (!all(mapply(grepl, expected_cols, colnames(WetDat)))) { PEcAn.logger::logger.severe("Format error in weather file: Columns must be (", expected_cols, "), in that order.") } day1 <- min(WetDat$doy) # data already subset upstream, but BioCro 0.9 assumes a full year if day1/dayn are unset @@ -26,38 +26,46 @@ call_biocro_0.9 <- function(WetDat, genus, year_in_run, # If not, rescale day1 and dayn to be relative to the start of the input. # Scaling is derived by inverting Biocro's day->index equations. 
biocro_checks_doy <- tryCatch( - {m <- BioCro::BioGro( - WetDat = matrix(c(0,10,0,0,0,0,0,0), nrow = 1), - day1 = 10, dayn = 10, timestep = 24); - inherits(m, "BioGro") }, - error = function(e){FALSE}) - if (!biocro_checks_doy && min(WetDat[,"doy"])>1) { - if (!is.null(day1)){ + { + m <- BioCro::BioGro( + WetDat = matrix(c(0, 10, 0, 0, 0, 0, 0, 0), nrow = 1), + day1 = 10, dayn = 10, timestep = 24 + ) + inherits(m, "BioGro") + }, + error = function(e) { + FALSE + } + ) + if (!biocro_checks_doy && min(WetDat[, "doy"]) > 1) { + if (!is.null(day1)) { # Biocro calculates line number as `indes1 <- (day1 - 1) * 24` - indes1 <- Position(function(x)x==day1, WetDat[,"doy"]) - day1 <- indes1/24 + 1 + indes1 <- Position(function(x) x == day1, WetDat[, "doy"]) + day1 <- indes1 / 24 + 1 } - if (!is.null(dayn)){ + if (!is.null(dayn)) { # Biocro calculates line number as `indesn <- (dayn) * 24` - indesn <- Position(function(x)x==dayn, WetDat[,"doy"], right = TRUE) - dayn <- indesn/24 + indesn <- Position(function(x) x == dayn, WetDat[, "doy"], right = TRUE) + dayn <- indesn / 24 } } - coppice.interval = config$pft$coppice.interval - if(is.null(coppice.interval)) { - coppice.interval = 1 # i.e. harvest every year + coppice.interval <- config$pft$coppice.interval + if (is.null(coppice.interval)) { + coppice.interval <- 1 # i.e. 
harvest every year } if (genus == "Saccharum") { + # probably should be handled like coppice shrubs or perennial grasses tmp.result <- BioCro::caneGro( WetDat = WetDat, lat = lat, - soilControl = l2n(config$pft$soilControl)) - # Addin Rhizome an Grain to avoid error in subsequent script processing results + soilControl = l2n(config$pft$soilControl) + ) + # Addin Rhizome and Grain to avoid error in subsequent script processing results tmp.result$Rhizome <- 0 tmp.result$Grain <- 0 - } else if (genus %in% c("Salix", "Populus")) { + } else if (genus %in% c("Salix", "Populus")) { # coppice trees / shrubs if (year_in_run == 1) { iplant <- config$pft$iPlantControl } else { @@ -65,13 +73,13 @@ call_biocro_0.9 <- function(WetDat, genus, year_in_run, iplant$iRoot <- data.table::last(tmp.result$Root) iplant$iStem <- data.table::last(tmp.result$Stem) - if ((year_in_run - 1)%%coppice.interval == 0) { + if ((year_in_run - 1) %% coppice.interval == 0) { # coppice when remainder = 0 HarvestedYield <- round(data.table::last(tmp.result$Stem) * 0.95, 2) - } else if ((year_in_run - 1)%%coppice.interval == 1) { + } else if ((year_in_run - 1) %% coppice.interval == 1) { # year after coppice iplant$iStem <- iplant$iStem * 0.05 - } # else { # do nothing if neither coppice year nor year following + } # else { # do nothing if neither coppice year nor year following } ## run willowGro tmp.result <- BioCro::willowGro( @@ -85,9 +93,9 @@ call_biocro_0.9 <- function(WetDat, genus, year_in_run, canopyControl = l2n(config$pft$canopyControl), willowphenoControl = l2n(config$pft$phenoParms), seneControl = l2n(config$pft$seneControl), - photoControl = l2n(config$pft$photoParms)) - - } else if (genus %in% c("Miscanthus", "Panicum")) { + photoControl = l2n(config$pft$photoParms) + ) + } else if (genus %in% c("Miscanthus", "Panicum")) { # perennial grasses if (year_in_run == 1) { iRhizome <- config$pft$iPlantControl$iRhizome } else { @@ -104,35 +112,33 @@ call_biocro_0.9 <- function(WetDat, genus, 
year_in_run, phenoControl = l2n(config$pft$phenoParms), seneControl = l2n(config$pft$seneControl), iRhizome = as.numeric(iRhizome), - photoControl = config$pft$photoParms) - - } else if (genus %in% c("Sorghum", "Setaria")) { - if (year_in_run == 1) { - iplant <- config$pft$iPlantControl - } else { - iplant$iRhizome <- data.table::last(tmp.result$Rhizome) - iplant$iRoot <- data.table::last(tmp.result$Root) - iplant$iStem <- data.table::last(tmp.result$Stem) - } + photoControl = config$pft$photoParms + ) + } else if (genus %in% c("Sorghum", "Setaria")) { # annual grasses + # Perennial Sorghum exists but is not a major crop + # assume these are replanted from seed each year + # https://landinstitute.org/our-work/perennial-crops/perennial-sorghum/ + iplant <- config$pft$iPlantControl ## run BioGro tmp.result <- BioCro::BioGro( WetDat = WetDat, iRhizome = as.numeric(iplant$iRhizome), iRoot = as.numeric(iplant$iRoot), iStem = as.numeric(iplant$iStem), - iLeaf = as.numeric(iplant$iLeaf), + iLeaf = as.numeric(iplant$iLeaf), day1 = day1, dayn = dayn, soilControl = l2n(config$pft$soilControl), canopyControl = l2n(config$pft$canopyControl), phenoControl = l2n(config$pft$phenoParms), seneControl = l2n(config$pft$seneControl), - photoControl = l2n(config$pft$photoParms)) - + photoControl = l2n(config$pft$photoParms) + ) } else { PEcAn.logger::logger.severe( "Genus '", genus, "' is not supported by PEcAn.BIOCRO when using BioCro 0.9x.", - "Supported genera: Saccharum, Salix, Populus, Sorghum, Miscanthus, Panicum, Setaria") + "Supported genera: Saccharum, Salix, Populus, Sorghum, Miscanthus, Panicum, Setaria" + ) } names(tmp.result) <- sub("DayofYear", "doy", names(tmp.result)) names(tmp.result) <- sub("Hour", "hour", names(tmp.result)) @@ -148,7 +154,6 @@ call_biocro_0.9 <- function(WetDat, genus, year_in_run, call_biocro_1 <- function(WetDat, genus, year_in_run, config, lat, lon, tmp.result, HarvestedYield) { - if (year_in_run == 1) { initial_values <- config$pft$initial_values 
} else { @@ -162,13 +167,15 @@ call_biocro_1 <- function(WetDat, genus, year_in_run, initial_values = initial_values, parameters = config$pft$parameters, varying_parameters = WetDat, - modules = config$pft$modules) + modules = config$pft$modules + ) tmp.result <- dplyr::rename(tmp.result, ThermalT = "TTc", LAI = "lai", SoilEvaporation = "soil_evaporation", - CanopyTrans = "canopy_transpiration") + CanopyTrans = "canopy_transpiration" + ) tmp.result$AboveLitter <- tmp.result$LeafLitter + tmp.result$StemLitter tmp.result$BelowLitter <- tmp.result$RootLitter + tmp.result$RhizomeLitter diff --git a/models/ed/Dockerfile b/models/ed/Dockerfile index c1d64ba2e3c..34cad0989a1 100644 --- a/models/ed/Dockerfile +++ b/models/ed/Dockerfile @@ -7,8 +7,8 @@ ARG IMAGE_VERSION="latest" FROM debian:stretch as model-binary # Some variables that can be used to set control the docker build -ARG MODEL_VERSION=git -ARG BINARY_VERSION=2.1 +ARG MODEL_VERSION="2.2.0" +ARG BINARY_VERSION="2.2" # specify fortran compiler ENV FC_TYPE=GNU @@ -59,7 +59,7 @@ RUN apt-get update \ # ---------------------------------------------------------------------- # Some variables that can be used to set control the docker build -ARG MODEL_VERSION=git +ARG MODEL_VERSION="2.2.0" # Setup model_info file COPY model_info.json /work/model.json diff --git a/models/ed/R/model2netcdf.ED2.R b/models/ed/R/model2netcdf.ED2.R index 9b1a7047ed6..3170d9b8457 100644 --- a/models/ed/R/model2netcdf.ED2.R +++ b/models/ed/R/model2netcdf.ED2.R @@ -280,10 +280,23 @@ read_T_files <- function(yr, yfiles, tfiles, outdir, start_date, end_date, ...){ } } - CheckED2Version <- function(nc) { + CheckED2Variables <- function(nc) { + vars_detected <- NULL + name_convention <- NULL + if ("FMEAN_BDEAD_PY" %in% names(nc$var)) { - return("Git") + vars_detected <- c(vars_detected,"FMEAN_BDEAD_PY") + name_convention <- "Contains_FMEAN" + } + if ("FMEAN_SOIL_TEMP_PY" %in% names(nc$var)) { + vars_detected <- c(vars_detected, "FMEAN_SOIL_TEMP_PY") 
+ name_convention <- "Contains_FMEAN" } + if(!is.null(vars_detected)){ + PEcAn.logger::logger.warn(paste("Found variable(s): ", paste(vars_detected, collapse = " "), ", now processing FMEAN* named variables. Note that varible naming conventions may change with ED2 version.")) + } + + return(name_convention) } # note that there is always one Tower file per year @@ -324,8 +337,8 @@ read_T_files <- function(yr, yfiles, tfiles, outdir, start_date, end_date, ...){ slzdata <- array(c(-2, -1.5, -1, -0.8, -0.6, -0.4, -0.2, -0.1, -0.05)) } - ## Check for which version of ED2 we are using. - ED2vc <- CheckED2Version(ncT) + ## Check for what naming convention of ED2 vars we are using. May change with ED2 version. + ED2vc <- CheckED2Variables(ncT) ## store for later use, will only use last data dz <- diff(slzdata) diff --git a/models/ed/inst/ED2IN.git b/models/ed/inst/ED2IN.git deleted file mode 100644 index 58a154572b2..00000000000 --- a/models/ed/inst/ED2IN.git +++ /dev/null @@ -1,1259 +0,0 @@ -!==========================================================================================! -!==========================================================================================! -! ED2IN . ! -! ! -! This is the file that contains the variables that define how ED is to be run. There ! -! is some brief information about the variables here. ! -!------------------------------------------------------------------------------------------! -$ED_NL - - !----- Simulation title (64 characters). -----------------------------------------------! - NL%EXPNME = 'ED2 vGITHUB PEcAn @ENSNAME@' - !---------------------------------------------------------------------------------------! - - !---------------------------------------------------------------------------------------! - ! Type of run: ! - ! INITIAL -- Starts a new run, that can be based on a previous run (restart/history), ! - ! but then it will use only the biomass and soil carbon information. ! - ! 
HISTORY -- Resumes a simulation from the last history. This is different from ! - ! initial in the sense that exactly the same information written in the ! - ! history will be used here. ! - !---------------------------------------------------------------------------------------! - NL%RUNTYPE = 'INITIAL' - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! Start of simulation. Information must be given in UTC time. ! - !---------------------------------------------------------------------------------------! - NL%IMONTHA = @START_MONTH@ - NL%IDATEA = @START_DAY@ - NL%IYEARA = @START_YEAR@ - NL%ITIMEA = 0000 - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! End of simulation. Information must be given in UTC time. ! - !---------------------------------------------------------------------------------------! - NL%IMONTHZ = @END_MONTH@ - NL%IDATEZ = @END_DAY@ - NL%IYEARZ = @END_YEAR@ - NL%ITIMEZ = 0000 - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! DTLSM -- Time step to integrate photosynthesis, and the maximum time step for ! - ! integration of energy and water budgets (units: seconds). Notice that the ! - ! model will take steps shorter than this if this is too coarse and could ! - ! lead to loss of accuracy or unrealistic results in the biophysics. ! - ! Recommended values are < 60 seconds if INTEGRATION_SCHEME is 0, and 240-900 ! - ! seconds otherwise. ! - ! RADFRQ -- Time step to integrate radiation, in seconds. This must be an integer ! - ! multiple of DTLSM, and we recommend it to be exactly the same as DTLSM. ! 
- !---------------------------------------------------------------------------------------! - NL%DTLSM = 600. - NL%RADFRQ = 600. - !---------------------------------------------------------------------------------------! - - - - - !---------------------------------------------------------------------------------------! - ! The following variables are used in case the user wants to run a regional run. ! - ! ! - ! N_ED_REGION -- number of regions for which you want to run ED. This can be set to ! - ! zero provided that N_POI is not... ! - ! GRID_TYPE -- which kind of grid to run: ! - ! 0. Longitude/latitude grid ! - ! 1. Polar-stereographic ! - !---------------------------------------------------------------------------------------! - NL%N_ED_REGION = 0 - NL%GRID_TYPE = 0 - - !------------------------------------------------------------------------------------! - ! The following variables are used only when GRID_TYPE is set to 0. You must ! - ! provide one value for each grid, except otherwise noted. ! - ! ! - ! GRID_RES -- Grid resolution, in degrees (first grid only, the other grids ! - ! resolution will be defined by NSTRATX/NSTRATY). ! - ! ED_REG_LATMIN -- Southernmost point of each region. ! - ! ED_REG_LATMAX -- Northernmost point of each region. ! - ! ED_REG_LONMIN -- Westernmost point of each region. ! - ! ED_REG_LONMAX -- Easternmost point of each region. ! - !------------------------------------------------------------------------------------! - NL%GRID_RES = 1.0 ! This is the grid resolution scale in degrees. [\*/] - NL%ED_REG_LATMIN = -90 ! List of minimum latitudes; - NL%ED_REG_LATMAX = 90 ! List of maximum latitudes; - NL%ED_REG_LONMIN = -180 ! List of minimum longitudes; - NL%ED_REG_LONMAX = 180 ! List of maximum longitudes; - !------------------------------------------------------------------------------------! - - - - !------------------------------------------------------------------------------------! - ! 
The following variables are used only when GRID_TYPE is set to 1. ! - ! ! - ! NNXP -- number of points in the X direction. One value for each grid. ! - ! NNYP -- number of points in the Y direction. One value for each grid. ! - ! DELTAX -- grid resolution in the X direction, near the grid pole. Units: [ m]. ! - ! this value is used to define the first grid only, other grids are ! - ! defined using NNSTRATX. ! - ! DELTAY -- grid resolution in the Y direction, near the grid pole. Units: [ m]. ! - ! this value is used to define the first grid only, other grids are ! - ! defined using NNSTRATX. Unless you are running some specific tests, ! - ! both DELTAX and DELTAY should be the same. ! - ! POLELAT -- Latitude of the pole point. Set this close to CENTLAT for a more ! - ! traditional "square" domain. One value for all grids. ! - ! POLELON -- Longitude of the pole point. Set this close to CENTLON for a more ! - ! traditional "square" domain. One value for all grids. ! - ! CENTLAT -- Latitude of the central point. One value for each grid. ! - ! CENTLON -- Longitude of the central point. One value for each grid. ! - !------------------------------------------------------------------------------------! - NL%NNXP = 110 - NL%NNYP = 70 - NL%DELTAX = 60000. - NL%DELTAY = 60000. - NL%POLELAT = -2.609075 - NL%POLELON = -60.2093 - NL%CENTLAT = -2.609075 - NL%CENTLON = -60.2093 - !------------------------------------------------------------------------------------! - - - - !------------------------------------------------------------------------------------! - ! Nest ratios. These values are used by both GRID_TYPE=0 and GRID_TYPE=1. ! - ! NSTRATX -- this is will divide the values given by DELTAX or GRID_RES for the ! - ! nested grids. The first value should be always one. ! - ! NSTRATY -- this is will divide the values given by DELTAY or GRID_RES for the ! - ! nested grids. The first value should be always one, and this must ! - ! 
be always the same as NSTRATX when GRID_TYPE = 0, and this is also ! - ! strongly recommended for when GRID_TYPE = 1. ! - !------------------------------------------------------------------------------------! - NL%NSTRATX = 1,4 - NL%NSTRATY = 1,4 - !------------------------------------------------------------------------------------! - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! The following variables are used to define single polygon of interest runs, and ! - ! they are ignored when N_ED_REGION = 0. ! - ! ! - ! N_POI -- number of polygons of interest (POIs). This can be zero as long as ! - ! N_ED_REGION is not. ! - ! POI_LAT -- list of latitudes of each POI. ! - ! POI_LON -- list of longitudes of each POI. ! - ! POI_RES -- grid resolution of each POI (degrees). This is used only to define the ! - ! soil types ! - !---------------------------------------------------------------------------------------! - NL%N_POI = 1 ! number of polygons of interest (POIs). This could be zero. - NL%POI_LAT = @SITE_LAT@ ! list of the latitudes of the POIs (degrees north) - NL%POI_LON = @SITE_LON@ ! list of the longitudes of the POIs (degrees east) - NL%POI_RES = 1.00 - !---------------------------------------------------------------------------------------! - !---------------------------------------------------------------------------------------! - ! LOADMETH -- Load balancing method. This is used only in regional runs run in ! - ! parallel. ! - ! 0. Let ED decide the best way of splitting the polygons. Commonest ! - ! option and default. ! - ! 1. One of the methods to split polygons based on their previous ! - ! work load. Developpers only. ! - ! 2. Try to load an equal number of SITES per node. Useful for when ! - ! total number of polygon is the same as the total number of cores. ! - ! 3. 
Another method to split polygons based on their previous work load. ! - ! Developpers only. ! - !---------------------------------------------------------------------------------------! - NL%LOADMETH = 0 - !---------------------------------------------------------------------------------------! - - - - - !---------------------------------------------------------------------------------------! - ! ED2 File output. For all the variables 0 means no output and 3 means HDF5 output. ! - ! ! - ! IFOUTPUT -- Fast analysis. These are mostly polygon-level averages, and the time ! - ! interval between files is determined by FRQANL ! - ! IDOUTPUT -- Daily means (one file per day) ! - ! IMOUTPUT -- Monthly means (one file per month) ! - ! IQOUTPUT -- Monthly means of the diurnal cycle (one file per month). The number ! - ! of points for the diurnal cycle is 86400 / FRQANL ! - ! IYOUTPUT -- Annual output. ! - ! ITOUTPUT -- Instantaneous fluxes, mostly polygon-level variables, one file per year. ! - ! ISOUTPUT -- restart file, for HISTORY runs. The time interval between files is ! - ! determined by FRQHIS ! - !---------------------------------------------------------------------------------------! - NL%IFOUTPUT = 0 ! Instantaneous analysis (site average) - NL%IDOUTPUT = 0 ! Daily means (site average) - NL%IMOUTPUT = 0 ! Monthly means (site average) - NL%IQOUTPUT = 0 ! Monthly means (diurnal cycle) - NL%IYOUTPUT = 3 ! Annual output - NL%ITOUTPUT = 3 ! Instantaneous fluxes (site average) --> "Tower" Files - NL%ISOUTPUT = 0 ! History files - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! The following variables control whether site-, patch-, and cohort-level time ! - ! means and mean sum of squares should be included in the output files or not. ! - ! ! - ! Reasons to add them: ! - ! a. Sub-polygon variables are more comprehensive. ! - ! b. 
Explore heterogeneity within a polygon and make interesting analysis. ! - ! c. More chances to create cool 3-D plots. ! - ! ! - ! Reasons to NOT add them: ! - ! a. Output files will become much larger! ! - ! b. In regional/coupled runs, the output files will be ridiculously large. ! - ! c. You may fill up the disk. ! - ! d. Other people's job may crash due to insufficient disk space. ! - ! e. You will gain a bad reputation amongst your colleagues. ! - ! f. And it will be entirely your fault. ! - ! ! - ! Either way, polygon-level averages are always included, and so are the instan- ! - ! taneous site-, patch-, and cohort-level variables needed for resuming the run. ! - ! ! - ! IADD_SITE_MEANS -- Add site-level averages to the output (0 = no; 1 = yes) ! - ! IADD_PATCH_MEANS -- Add patch-level averages to the output (0 = no; 1 = yes) ! - ! IADD_COHORT_MEANS -- Add cohort-level averages to the output (0 = no; 1 = yes) ! - ! ! - !---------------------------------------------------------------------------------------! - NL%IADD_SITE_MEANS = 0 - NL%IADD_PATCH_MEANS = 0 - NL%IADD_COHORT_MEANS = 0 - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! ATTACH_METADATA -- Flag for attaching metadata to HDF datasets. Attaching metadata ! - ! will aid new users in quickly identifying dataset descriptions but ! - ! will compromise I/O performance significantly. ! - ! 0 = no metadata, 1 = attach metadata ! - !---------------------------------------------------------------------------------------! - NL%ATTACH_METADATA = 0 - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! UNITFAST -- The following variables control the units for FRQFAST/OUTFAST, and ! - ! UNITSTATE FRQSTATE/OUTSTATE, respectively. 
Possible values are: ! - ! 0. Seconds; ! - ! 1. Days; ! - ! 2. Calendar months (variable) ! - ! 3. Calendar years (variable) ! - ! ! - ! N.B.: 1. In case OUTFAST/OUTSTATE are set to special flags (-1 or -2) ! - ! UNITFAST/UNITSTATE will be ignored for them. ! - ! 2. In case IQOUTPUT is set to 3, then UNITFAST has to be 0. ! - ! ! - !---------------------------------------------------------------------------------------! - NL%UNITFAST = 0 - NL%UNITSTATE = 3 - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! OUTFAST/OUTSTATE -- these control the number of times per file. ! - ! 0. Each time gets its own file ! - ! -1. One file per day ! - ! -2. One file per month ! - ! > 0. Multiple timepoints can be recorded to a single file reducing ! - ! the number of files and i/o time in post-processing. ! - ! Multiple timepoints should not be used in the history files ! - ! if you intend to use these for HISTORY runs. ! - !---------------------------------------------------------------------------------------! - NL%OUTFAST = -1 ! orig. 3600. - NL%OUTSTATE = 0 ! orig. 1. - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! ICLOBBER -- What to do in case the model finds a file that it was supposed the ! - ! written? 0 = stop the run, 1 = overwrite without warning. ! - ! FRQFAST -- time interval between analysis files, units defined by UNITFAST. ! - ! FRQSTATE -- time interval between history files, units defined by UNITSTATE. ! - !---------------------------------------------------------------------------------------! - NL%ICLOBBER = 1 ! 0 = stop if files exist, 1 = overwite files - NL%FRQFAST = 1800. ! Time interval between analysis/history files - NL%FRQSTATE = 86400. ! 
Time interval between history files - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! FFILOUT -- Path and prefix for analysis files (all but history/restart). ! - ! SFILOUT -- Path and prefix for history files. ! - !---------------------------------------------------------------------------------------! - NL%FFILOUT = '@FFILOUT@' ! Analysis output prefix; - NL%SFILOUT = '@SFILOUT@' ! History output prefix - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! IED_INIT_MODE -- This controls how the plant community and soil carbon pools are ! - ! initialised. ! - ! ! - ! -1. Start from a true bare ground run, or an absolute desert run. This will ! - ! never grow any plant. ! - ! 0. Start from near-bare ground (only a few seedlings from each PFT to be included ! - ! in this run). ! - ! 1. This will use history files written by ED-1.0. It will grab the ecosystem ! - ! state (like biomass, LAI, plant density, etc.), but it will start the ! - ! thermodynamic state as a new simulation. ! - ! 2. Same as 1, but it uses history files from ED-2.0 without multiple sites, and ! - ! with the old PFT numbers. ! - ! 3. Same as 1, but using history files from ED-2.0 with multiple sites and ! - ! TOPMODEL hydrology. ! - ! 4. Same as 1, but using ED2.1 H5 history/state files that take the form: ! - ! 'dir/prefix-gxx.h5' ! - ! Initialization files MUST end with -gxx.h5 where xx is a two digit integer ! - ! grid number. Each grid has its own initialization file. As an example, if a ! - ! user has two files to initialize their grids with: ! - ! example_file_init-g01.h5 and example_file_init-g02.h5 ! - ! NL%SFILIN = 'example_file_init' ! - ! ! - ! 5. 
This is similar to option 4, except that you may provide several files ! - ! (including a mix of regional and POI runs, each file ending at a different ! - ! date). This will not check date nor grid structure, it will simply read all ! - ! polygons and match the nearest neighbour to each polygon of your run. SFILIN ! - ! must have the directory common to all history files that are sought to be used,! - ! up to the last character the files have in common. For example if your files ! - ! are ! - ! /mypath/P0001-S-2000-01-01-000000-g01.h5, ! - ! /mypath/P0002-S-1966-01-01-000000-g02.h5, ! - ! ... ! - ! /mypath/P1000-S-1687-01-01-000000-g01.h5: ! - ! NL%SFILIN = '/mypath/P' ! - ! ! - ! 6 - Initialize with ED-2 style files without multiple sites, exactly like option ! - ! 2, except that the PFT types are preserved. ! - !---------------------------------------------------------------------------------------! - NL%IED_INIT_MODE = @INIT_MODEL@ - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! EDRES -- Expected input resolution for ED2.0 files. This is not used unless ! - ! IED_INIT_MODE = 3. ! - !---------------------------------------------------------------------------------------! - NL%EDRES = 1.0 - !---------------------------------------------------------------------------------------! - - - !---------------------------------------------------------------------------------------! - ! SFILIN -- The meaning and the size of this variable depends on the type of run, set ! - ! at variable NL%RUNTYPE. ! - ! ! - ! 1. INITIAL. Then this is the path+prefix of the previous ecosystem state. This has ! - ! dimension of the number of grids so you can initialize each grid with a ! - ! different dataset. In case only one path+prefix is given, the same will ! - ! be used for every grid. Only some ecosystem variables will be set up ! - ! 
here, and the initial condition will be in thermodynamic equilibrium. ! - ! ! - ! 2. HISTORY. This is the path+prefix of the history file that will be used. Only the ! - ! path+prefix will be used, as the history for every grid must have come ! - ! from the same simulation. ! - !---------------------------------------------------------------------------------------! - NL%SFILIN = '@SITE_PSSCSS@' - !---------------------------------------------------------------------------------------! - ! History file information. These variables are used to continue a simulation from ! - ! a point other than the beginning. Time must be in UTC. ! - ! ! - ! IMONTHH -- the time of the history file. This is the only place you need to change ! - ! IDATEH dates for a HISTORY run. You may change IMONTHZ and related in case you ! - ! IYEARH want to extend the run, but yo should NOT change IMONTHA and related. ! - ! ITIMEH ! - !---------------------------------------------------------------------------------------! - NL%ITIMEH = 0000 - NL%IDATEH = 01 - NL%IMONTHH = 01 - NL%IYEARH = 1500 - !---------------------------------------------------------------------------------------! - ! NZG - number of soil layers. One value for all grids. ! - ! NZS - maximum number of snow/water pounding layers. This is used only for ! - ! snow, if only liquid water is standing, the water will be all collapsed ! - ! into a single layer, so if you are running for places where it doesn't snow ! - ! a lot, leave this set to 1. One value for all grids. ! - !---------------------------------------------------------------------------------------! - NL%NZG = 9 - NL%NZS = 1 - !---------------------------------------------------------------------------------------! - - - - !---------------------------------------------------------------------------------------! - ! ISOILFLG -- this controls which soil type input you want to use. ! - ! 1. Read in from a dataset I will provide in the SOIL_DATABASE variable a ! - ! 
few lines below. ! - ! below. ! - ! 2. No data available, I will use constant values I will provide in ! - ! NSLCON or by prescribing the fraction of sand and clay (see SLXSAND ! - ! and SLXCLAY). ! - !---------------------------------------------------------------------------------------! - NL%ISOILFLG = 2 - !---------------------------------------------------------------------------------------! - ! NSLCON -- ED-2 Soil classes that the model will use when ISOILFLG is set to 2. ! - ! Possible values are: ! - !---------------------------------------------------------------------------------------! - ! 1 -- sand | 7 -- silty clay loam | 13 -- bedrock ! - ! 2 -- loamy sand | 8 -- clayey loam | 14 -- silt ! - ! 3 -- sandy loam | 9 -- sandy clay | 15 -- heavy clay ! - ! 4 -- silt loam | 10 -- silty clay | 16 -- clayey sand ! - ! 5 -- loam | 11 -- clay | 17 -- clayey silt ! - ! 6 -- sandy clay loam | 12 -- peat ! - !---------------------------------------------------------------------------------------! - NL%NSLCON = 3 !3 US-WCr, 2 US-Syv, 10 US-Los - !---------------------------------------------------------------------------------------! - ! ISOILCOL -- LEAF-3 and ED-2 soil colour classes that the model will use when ISOILFLG ! - ! is set to 2. Soil classes are from 1 to 20 (1 = lightest; 20 = darkest). ! - ! The values are the same as CLM-4.0. The table is the albedo for visible ! - ! and near infra-red. ! - !---------------------------------------------------------------------------------------! - ! ! - ! |-----------------------------------------------------------------------| ! - ! | | Dry soil | Saturated | | Dry soil | Saturated | ! - ! | Class |-------------+-------------| Class +-------------+-------------| ! - ! | | VIS | NIR | VIS | NIR | | VIS | NIR | VIS | NIR | ! - ! |-------+------+------+------+------+-------+------+------+------+------| ! - ! | 1 | 0.36 | 0.61 | 0.25 | 0.50 | 11 | 0.24 | 0.37 | 0.13 | 0.26 | ! - ! 
| 2 | 0.34 | 0.57 | 0.23 | 0.46 | 12 | 0.23 | 0.35 | 0.12 | 0.24 | ! - ! | 3 | 0.32 | 0.53 | 0.21 | 0.42 | 13 | 0.22 | 0.33 | 0.11 | 0.22 | ! - ! | 4 | 0.31 | 0.51 | 0.20 | 0.40 | 14 | 0.20 | 0.31 | 0.10 | 0.20 | ! - ! | 5 | 0.30 | 0.49 | 0.19 | 0.38 | 15 | 0.18 | 0.29 | 0.09 | 0.18 | ! - ! | 6 | 0.29 | 0.48 | 0.18 | 0.36 | 16 | 0.16 | 0.27 | 0.08 | 0.16 | ! - ! | 7 | 0.28 | 0.45 | 0.17 | 0.34 | 17 | 0.14 | 0.25 | 0.07 | 0.14 | ! - ! | 8 | 0.27 | 0.43 | 0.16 | 0.32 | 18 | 0.12 | 0.23 | 0.06 | 0.12 | ! - ! | 9 | 0.26 | 0.41 | 0.15 | 0.30 | 19 | 0.10 | 0.21 | 0.05 | 0.10 | ! - ! | 10 | 0.25 | 0.39 | 0.14 | 0.28 | 20 | 0.08 | 0.16 | 0.04 | 0.08 | ! - ! |-----------------------------------------------------------------------| ! - ! ! - ! Soil type 21 is a special case in which we use the albedo method that used to be ! - ! the default in ED-2.1. ! - !---------------------------------------------------------------------------------------! - NL%ISOILCOL = 10 !21 12 for US-Los - !---------------------------------------------------------------------------------------! - ! These variables are used to define the soil properties when you don't want to use ! - ! the standard soil classes. ! - ! ! - ! SLXCLAY -- Prescribed fraction of clay [0-1] ! - ! SLXSAND -- Prescribed fraction of sand [0-1]. ! - ! ! - ! They are used only when ISOILFLG is 2, both values are between 0. and 1., and ! - ! theira sum doesn't exceed 1. Otherwise standard ED values will be used instead. ! - !---------------------------------------------------------------------------------------! - NL%SLXCLAY = 0.13 ! 0.13 US-WCr, 0.06 US-Syv, 0.0663 US-PFa, 0.68 default - NL%SLXSAND = 0.54 ! 0.54 US-WCr, 0.57 US-Syv, 0.5931 US-PFa, 0.20 default - !---------------------------------------------------------------------------------------! - ! Soil grid and initial conditions if no file is provided: ! - ! ! - ! SLZ - soil depth in m. Values must be negative and go from the deepest layer to ! - ! the top. ! - ! 
SLMSTR - this is the initial soil moisture, now given as the soil moisture index. ! - ! Values can be fraction, in which case they will be linearly interpolated ! - ! between the special points (e.g. 0.5 will put soil moisture half way ! - ! between the wilting point and field capacity). ! - ! -1 = dry air soil moisture ! - ! 0 = wilting point ! - ! 1 = field capacity ! - ! 2 = porosity (saturation) ! - ! STGOFF - initial temperature offset (soil temperature = air temperature + offset) ! - !---------------------------------------------------------------------------------------! - NL%SLZ = -2.0,-1.5, -1.0, -0.80, -0.60, -0.40, -0.2, -0.10, -0.05 - NL%SLMSTR = 0.65, 0.65, 0.65, 0.65, 0.65, 0.65, 0.65, 0.65, 0.65 - NL%STGOFF = 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 - !---------------------------------------------------------------------------------------! - ! Input databases ! - ! VEG_DATABASE -- vegetation database, used only to determine the land/water mask. ! - ! Fill with the path and the prefix. ! - ! SOIL_DATABASE -- soil database, used to determine the soil type. Fill with the ! - ! path and the prefix. ! - ! LU_DATABASE -- land-use change disturbance rates database, used only when ! - ! IANTH_DISTURB is set to 1. Fill with the path and the prefix. ! - ! PLANTATION_FILE -- plantation fraction file. In case you don't have such a file or ! - ! you do not want to use it, you must leave this variable empty: ! - ! (NL%PLANTATION_FILE = '' ! - ! THSUMS_DATABASE -- input directory with dataset to initialise chilling degrees and ! - ! growing degree days, which is used to drive the cold-deciduous ! - ! phenology (you must always provide this, even when your PFTs are ! - ! not cold deciduous). ! - ! ED_MET_DRIVER_DB -- File containing information for meteorological driver ! - ! instructions (the "header" file). ! - ! SOILSTATE_DB -- Dataset in case you want to provide the initial conditions of ! - ! soil temperature and moisture. ! - ! 
SOILDEPTH_DB -- Dataset in case you want to read in soil depth information. ! - !---------------------------------------------------------------------------------------! - NL%VEG_DATABASE = '@ED_VEG@' - NL%SOIL_DATABASE = '@ED_SOIL@' - NL%LU_DATABASE = '@ED_LU@' - NL%PLANTATION_FILE = '' - NL%THSUMS_DATABASE = '@ED_THSUM@' - NL%ED_MET_DRIVER_DB = '@SITE_MET@' - NL%SOILSTATE_DB = '' - NL%SOILDEPTH_DB = '' - !---------------------------------------------------------------------------------------! - ! ISOILSTATEINIT -- Variable controlling how to initialise the soil temperature and ! - ! moisture ! - ! 0. Use SLMSTR and STGOFF. ! - ! 1. Read from SOILSTATE_DB. ! - ! ISOILDEPTHFLG -- Variable controlling how to initialise soil depth ! - ! 0. Constant, always defined by the first SLZ layer. ! - ! 1. Read from SOILDEPTH_DB. ! - !---------------------------------------------------------------------------------------! - NL%ISOILSTATEINIT = 0 - NL%ISOILDEPTHFLG = 0 - !---------------------------------------------------------------------------------------! - ! ISOILBC -- This controls the soil moisture boundary condition at the bottom. If ! - ! unsure, use 0 for short-term simulations (couple of days), and 1 for long- ! - ! -term simulations (months to years). ! - ! 0. Bedrock. Flux from the bottom of the bottommost layer is set to 0. ! - ! 1. Gravitational flow. The flux from the bottom of the bottommost layer ! - ! is due to gradient of height only. ! - ! 2. Super drainage. Soil moisture of the ficticious layer beneath the ! - ! bottom is always at dry air soil moisture. ! - ! 3. Half-way. Assume that the fictious layer beneath the bottom is always ! - ! at field capacity. ! - ! 4. Aquifer. Soil moisture of the ficticious layer beneath the bottom is ! - ! always at saturation. ! - !---------------------------------------------------------------------------------------! 
- NL%ISOILBC = 1 - !---------------------------------------------------------------------------------------! - ! SLDRAIN -- This is used only when ISOILBC is set to 2. In this case SLDRAIN is the ! - ! equivalent slope that will slow down drainage. If this is set to zero, ! - ! then lateral drainage reduces to flat bedrock, and if this is set to 90, ! - ! then lateral drainage becomes free drainage. SLDRAIN must be between 0 ! - ! and 90. ! - !---------------------------------------------------------------------------------------! - NL%SLDRAIN = 10. - !---------------------------------------------------------------------------------------! - ! IVEGT_DYNAMICS -- The vegetation dynamics scheme. ! - ! 0. No vegetation dynamics, the initial state will be preserved, ! - ! even though the model will compute the potential values. This ! - ! option is useful for theoretical simulations only. ! - ! 1. Normal ED vegetation dynamics (Moorcroft et al 2001). ! - ! The normal option for almost any simulation. ! - !---------------------------------------------------------------------------------------! - NL%IVEGT_DYNAMICS = 1 - !---------------------------------------------------------------------------------------! - ! IBIGLEAF -- Do you want to run ED as a 'big leaf' model? ! - ! 0. No, use the standard size- and age-structure (Moorcroft et al. 2001) ! - ! This is the recommended method for most applications. ! - ! 1. 'big leaf' ED: this will have no horizontal or vertical hetero- ! - ! geneities; 1 patch per PFT and 1 cohort per patch; no vertical ! - ! growth, recruits will 'appear' instantaneously at maximum height. ! - ! ! - ! N.B. if you set IBIGLEAF to 1, you MUST turn off the crown model (CROWN_MOD = 0) ! - !---------------------------------------------------------------------------------------! - NL%IBIGLEAF = 0 - !---------------------------------------------------------------------------------------! - ! INTEGRATION_SCHEME -- The biophysics integration scheme. ! - ! 
0. Euler step. The fastest, but it doesn't estimate ! - ! errors. ! - ! 1. Fourth-order Runge-Kutta method. ED-2.1 default method ! - ! 2. Heun's method (a second-order Runge-Kutta). ! - ! 3. Hybrid Stepping (BDF2 implicit step for the canopy air and ! - ! leaf temp, forward Euler for else, under development). ! - !---------------------------------------------------------------------------------------! - NL%INTEGRATION_SCHEME = 1 - !---------------------------------------------------------------------------------------! - ! RK4_TOLERANCE -- This is the relative tolerance for Runge-Kutta or Heun's ! - ! integration. Larger numbers will make runs go faster, at the ! - ! expense of being less accurate. Currently the valid range is ! - ! between 1.e-7 and 1.e-1, but recommended values are between 1.e-4 ! - ! and 1.e-2. ! - !---------------------------------------------------------------------------------------! - NL%RK4_TOLERANCE = 0.01 - !---------------------------------------------------------------------------------------! - ! IBRANCH_THERMO -- This determines whether branches should be included in the ! - ! vegetation thermodynamics and radiation or not. ! - ! 0. No branches in energy/radiation (ED-2.1 default); ! - ! 1. Branches are accounted in the energy and radiation. Branchwood ! - ! and leaf are treated separately in the canopy radiation scheme, ! - ! but solved as a single pool in the biophysics integration. ! - ! 2. Similar to 1, but branches are treated as separate pools in the ! - ! biophysics (thus doubling the number of prognostic variables). ! - !---------------------------------------------------------------------------------------! - NL%IBRANCH_THERMO = 0 - !---------------------------------------------------------------------------------------! - ! IPHYSIOL -- This variable will determine the functional form that will control how ! - ! the various parameters will vary with temperature, and how the CO2 ! - ! 
compensation point for gross photosynthesis (Gamma*) will be found. ! - ! Options are: ! - ! ! - ! 0 -- Original ED-2.1, we use the "Arrhenius" function as in Foley et al. (1996) and ! - ! Moorcroft et al. (2001). Gamma* is found using the parameters for tau as in ! - ! Foley et al. (1996). ! - ! 1 -- Modified ED-2.1. In this case Gamma* is found using the Michaelis-Mentel ! - ! coefficients for CO2 and O2, as in Farquhar et al. (1980) and in CLM. ! - ! 2 -- Collatz et al. (1991). We use the power (Q10) equations, with Collatz et al. ! - ! parameters for compensation point, and the Michaelis-Mentel coefficients. The ! - ! correction for high and low temperatures are the same as in Moorcroft et al. ! - ! (2001). ! - ! 3 -- Same as 2, except that we find Gamma* as in Farquhar et al. (1980) and in CLM. ! - !---------------------------------------------------------------------------------------! - NL%IPHYSIOL = 2 - !---------------------------------------------------------------------------------------! - ! IALLOM -- Which allometry to use (this mostly affects tropical PFTs. Temperate PFTs ! - ! will use the new root allometry and the maximum crown area if IALLOM is set ! - ! to 1 or 2). ! - ! 0. Original ED-2.1 ! - ! 1. a. The coefficients for structural biomass are set so the total AGB ! - ! is similar to Baker et al. (2004), equation 2. Balive is the ! - ! default ED-2.1; ! - ! b. Experimental root depth that makes canopy trees to have root depths ! - ! of 5m and grasses/seedlings at 0.5 to have root depth of 0.5 m. ! - ! c. Crown area defined as in Poorter et al. (2006), imposing maximum ! - ! crown area ! - ! 2. Similar to 1, but with a few extra changes. ! - ! a. Height -> DBH allometry as in Poorter et al. (2006) ! - ! b. Balive is retuned, using a few leaf biomass allometric equations for ! - ! a few genuses in Costa Rica. References: ! - ! Cole and Ewel (2006), and Calvo Alvarado et al. (2008). ! 
- !---------------------------------------------------------------------------------------! - NL%IALLOM = 2 - !---------------------------------------------------------------------------------------! - ! IGRASS -- This controls the dynamics and growth calculation for grasses. A new ! - ! grass scheme is now available where bdead = 0, height is a function of bleaf! - ! and growth happens daily. ALS (3/3/12) ! - ! 0: grasses behave like trees as in ED2.1 (old scheme) ! - ! ! - ! 1: new grass scheme as described above ! - !---------------------------------------------------------------------------------------! - NL%IGRASS = 0 - !---------------------------------------------------------------------------------------! - ! IPHEN_SCHEME -- It controls the phenology scheme. Even within each scheme, the ! - ! actual phenology will be different depending on the PFT. ! - ! ! - ! -1: grasses - evergreen; ! - ! tropical - evergreen; ! - ! conifers - evergreen; ! - ! hardwoods - cold-deciduous (Botta et al.); ! - ! ! - ! 0: grasses - drought-deciduous (old scheme); ! - ! tropical - drought-deciduous (old scheme); ! - ! conifers - evergreen; ! - ! hardwoods - cold-deciduous; ! - ! ! - ! 1: prescribed phenology ! - ! ! - ! 2: grasses - drought-deciduous (new scheme); ! - ! tropical - drought-deciduous (new scheme); ! - ! conifers - evergreen; ! - ! hardwoods - cold-deciduous; ! - ! ! - ! 3: grasses - drought-deciduous (new scheme); ! - ! tropical - drought-deciduous (light phenology); ! - ! conifers - evergreen; ! - ! hardwoods - cold-deciduous; ! - ! ! - ! Old scheme: plants shed their leaves once instantaneous amount of available water ! - ! becomes less than a critical value. ! - ! New scheme: plants shed their leaves once a 10-day running average of available ! - ! water becomes less than a critical value. ! - !---------------------------------------------------------------------------------------! 
- NL%IPHEN_SCHEME = @PHENOL_SCHEME@ - !---------------------------------------------------------------------------------------! - ! Parameters that control the phenology response to radiation, used only when ! - ! IPHEN_SCHEME = 3. ! - ! ! - ! RADINT -- Intercept ! - ! RADSLP -- Slope. ! - !---------------------------------------------------------------------------------------! - NL%RADINT = -11.3868 - NL%RADSLP = 0.0824 - !---------------------------------------------------------------------------------------! - ! REPRO_SCHEME -- This controls plant reproduction and dispersal. ! - ! 0. Reproduction off. Useful for very short runs only. ! - ! 1. Original reproduction scheme. Seeds are exchanged between ! - ! patches belonging to the same site, but they can't go outside ! - ! their original site. ! - ! 2. Similar to 1, but seeds are exchanged between patches belonging ! - ! to the same polygon, even if they are in different sites. They ! - ! can't go outside their original polygon, though. This is the ! - ! same as option 1 if there is only one site per polygon. ! - ! 3. Similar to 2, but recruits will only be formed if their phenology ! - ! status would be "leaves fully flushed". This only matters for ! - ! drought deciduous plants. This option is for testing purposes ! - ! only, think 50 times before using it... ! - !---------------------------------------------------------------------------------------! - NL%REPRO_SCHEME = 0 - !---------------------------------------------------------------------------------------! - ! LAPSE_SCHEME -- This specifies the met lapse rate scheme: ! - ! 0. No lapse rates ! - ! 1. phenomenological, global ! - ! 2. phenomenological, local (not yet implemented) ! - ! 3. mechanistic(not yet implemented) ! - !---------------------------------------------------------------------------------------! - NL%LAPSE_SCHEME = 0 - !---------------------------------------------------------------------------------------! - ! 
CROWN_MOD -- Specifies how tree crowns are represented in the canopy radiation model, ! - ! and in the turbulence scheme depending on ICANTURB. ! - ! 0. ED1 default, crowns are evenly spread throughout the patch area, and ! - ! cohorts are stacked on the top of each other. ! - ! 1. Dietze (2008) model. Cohorts have a finite radius, and cohorts are ! - ! stacked on the top of each other. ! - !---------------------------------------------------------------------------------------! - NL%CROWN_MOD = 1 - !---------------------------------------------------------------------------------------! - ! The following variables control the canopy radiation solver. ! - ! ! - ! ICANRAD -- Specifies how canopy radiation is solved. This variable sets both ! - ! shortwave and longwave. ! - ! 0. Two-stream model (Medvigy 2006), with the possibility to apply ! - ! finite crown area to direct shortwave radiation. ! - ! 1. Multiple-scattering model (Zhao and Qualls 2005,2006), with the ! - ! possibility to apply finite crown area to all radiation fluxes. ! - ! LTRANS_VIS -- Leaf transmittance for tropical plants - Visible/PAR ! - ! LTRANS_NIR -- Leaf transmittance for tropical plants - Near Infrared ! - ! LREFLECT_VIS -- Leaf reflectance for tropical plants - Visible/PAR ! - ! LREFLECT_NIR -- Leaf reflectance for tropical plants - Near Infrared ! - ! ORIENT_TREE -- Leaf orientation factor for tropical trees. Extremes are: ! - ! -1. All leaves are oriented in the vertical ! - ! 0. Leaf orientation is perfectly random ! - ! 1. All leaves are oriented in the horizontal ! - ! In practice, acceptable values range from -0.4 to 0.6 ! - ! ORIENT_GRASS -- Leaf orientation factor for tropical grasses. Extremes are: ! - ! -1. All leaves are oriented in the vertical ! - ! 0. Leaf orientation is perfectly random ! - ! 1. All leaves are oriented in the horizontal ! - ! In practice, acceptable values range from -0.4 to 0.6 ! - ! CLUMP_TREE -- Clumping factor for tropical trees. Extremes are: ! - !
lim -> 0. Black hole (0 itself is unacceptable) ! - ! 1. Homogeneously spread over the layer (i.e., no clumping) ! - ! CLUMP_GRASS -- Clumping factor for tropical grasses. Extremes are: ! - ! lim -> 0. Black hole (0 itself is unacceptable) ! - ! 1. Homogeneously spread over the layer (i.e., no clumping) ! - !---------------------------------------------------------------------------------------! - NL%ICANRAD = 0 - NL%LTRANS_VIS = 0.050 - NL%LTRANS_NIR = 0.270 - NL%LREFLECT_VIS = 0.150 - NL%LREFLECT_NIR = 0.540 - NL%ORIENT_TREE = 0.100 - NL%ORIENT_GRASS = -0.100 - NL%CLUMP_TREE = 0.800 - NL%CLUMP_GRASS = 1.000 - !---------------------------------------------------------------------------------------! - ! DECOMP_SCHEME -- This specifies the dependence of soil decomposition on temperature. ! - ! 0. ED-2.0 default, the original exponential ! - ! 1. Lloyd and Taylor (1994) model ! - ! [[option 1 requires parameters to be set in xml]] ! - !---------------------------------------------------------------------------------------! - NL%DECOMP_SCHEME = 0 - !---------------------------------------------------------------------------------------! - ! H2O_PLANT_LIM -- this determines whether plant photosynthesis can be limited by ! - ! soil moisture, the FSW, defined as FSW = Supply / (Demand + Supply). ! - ! ! - ! Demand is always the transpiration rates in case soil moisture is ! - ! not limiting (the psi_0 term times LAI). The supply is determined ! - ! by Kw * nplant * Broot * Available_Water, and the definition of ! - ! available water changes depending on H2O_PLANT_LIM: ! - ! 0. Force FSW = 1 (effectively available water is infinity). ! - ! 1. Available water is the total soil water above wilting point, ! - ! integrated across all layers within the rooting zone. ! - ! 2. Available water is the soil water at field capacity minus ! - ! wilting point, scaled by the so-called wilting factor: ! - ! (psi(k) - (H - z(k)) - psi_wp) / (psi_fc - psi_wp) ! - ! 
where psi is the matric potential at layer k, z is the layer ! - ! depth, H is the crown height and psi_fc and psi_wp are the ! - ! matric potentials at field capacity and wilting point. ! - !---------------------------------------------------------------------------------------! - NL%H2O_PLANT_LIM = 1 - !---------------------------------------------------------------------------------------! - ! IDDMORT_SCHEME -- This flag determines whether storage should be accounted in the ! - ! carbon balance. ! - ! 0 -- Carbon balance is done in terms of fluxes only. This is the ! - ! default in ED-2.1 ! - ! 1 -- Carbon balance is offset by the storage pool. Plants will be ! - ! in negative carbon balance only when they run out of storage ! - ! and are still losing more carbon than gaining. ! - ! ! - ! DDMORT_CONST -- This constant (k) determines the relative contribution of light ! - ! and soil moisture to the density-dependent mortality rate. Values ! - ! range from 0 (soil moisture only) to 1 (light only). ! - ! ! - ! mort1 ! - ! mu_DD = ------------------------- ! - ! 1 + exp [ mort2 * cr ] ! - ! ! - ! CB CB ! - ! cr = k ------------- + (1 - k) ------------- ! - ! CB_lightmax CB_watermax ! - !---------------------------------------------------------------------------------------! - NL%IDDMORT_SCHEME = 0 - NL%DDMORT_CONST = 0.8 - !---------------------------------------------------------------------------------------! - ! The following variables are factors that control photosynthesis and respiration. ! - ! Notice that some of them are relative values whereas others are absolute. ! - ! ! - ! VMFACT_C3 -- Factor multiplying the default Vm0 for C3 plants (1.0 = default). ! - ! VMFACT_C4 -- Factor multiplying the default Vm0 for C4 plants (1.0 = default). ! - ! MPHOTO_TRC3 -- Stomatal slope (M) for tropical C3 plants ! - ! MPHOTO_TEC3 -- Stomatal slope (M) for conifers and temperate C3 plants ! - ! MPHOTO_C4 -- Stomatal slope (M) for C4 plants. ! - !
BPHOTO_BLC3 -- cuticular conductance for broadleaf C3 plants [umol/m2/s] ! - ! BPHOTO_NLC3 -- cuticular conductance for needleleaf C3 plants [umol/m2/s] ! - ! BPHOTO_C4 -- cuticular conductance for C4 plants [umol/m2/s] ! - ! KW_GRASS -- Water conductance for grasses, in m2/yr/kgC_root. This is used only ! - ! when H2O_PLANT_LIM is not 0. ! - ! KW_TREE -- Water conductance for trees, in m2/yr/kgC_root. This is used only ! - ! when H2O_PLANT_LIM is not 0. ! - ! GAMMA_C3 -- The dark respiration factor (gamma) for C3 plants. Subtropical ! - ! conifers will be scaled by GAMMA_C3 * 0.028 / 0.02 ! - ! GAMMA_C4 -- The dark respiration factor (gamma) for C4 plants. ! - ! D0_GRASS -- The transpiration control in gsw (D0) for ALL grasses. ! - ! D0_TREE -- The transpiration control in gsw (D0) for ALL trees. ! - ! ALPHA_C3 -- Quantum yield of ALL C3 plants. This is only applied when ! - ! QUANTUM_EFFICIENCY_T = 0. ! - ! ALPHA_C4 -- Quantum yield of C4 plants. This is always applied. ! - ! KLOWCO2IN -- The coefficient that controls the PEP carboxylase limited rate of ! - ! carboxylation for C4 plants. ! - ! RRFFACT -- Factor multiplying the root respiration factor for ALL PFTs. ! - ! (1.0 = default). ! - ! GROWTHRESP -- The actual growth respiration factor (C3/C4 tropical PFTs only). ! - ! (1.0 = default). ! - ! LWIDTH_GRASS -- Leaf width for grasses, in metres. This controls the leaf boundary ! - ! layer conductance (gbh and gbw). ! - ! LWIDTH_BLTREE -- Leaf width for trees, in metres. This controls the leaf boundary ! - ! layer conductance (gbh and gbw). This is applied to broadleaf trees ! - ! only. ! - ! LWIDTH_NLTREE -- Leaf width for trees, in metres. This controls the leaf boundary ! - ! layer conductance (gbh and gbw). This is applied to conifer trees ! - ! only. ! - ! Q10_C3 -- Q10 factor for C3 plants (used only if IPHYSIOL is set to 2 or 3). ! - ! Q10_C4 -- Q10 factor for C4 plants (used only if IPHYSIOL is set to 2 or 3). !
- !---------------------------------------------------------------------------------------! - NL%VMFACT_C3 = 1.00 - NL%VMFACT_C4 = 1.00 - NL%MPHOTO_TRC3 = 9.0 - NL%MPHOTO_TEC3 = 7.2 - NL%MPHOTO_C4 = 5.2 - NL%BPHOTO_BLC3 = 10000. - NL%BPHOTO_NLC3 = 1000. - NL%BPHOTO_C4 = 10000. - NL%KW_GRASS = 900. - NL%KW_TREE = 600. - NL%GAMMA_C3 = 0.015 - NL%GAMMA_C4 = 0.040 - NL%D0_GRASS = 0.016 - NL%D0_TREE = 0.016 - NL%ALPHA_C3 = 0.080 - NL%ALPHA_C4 = 0.055 - NL%KLOWCO2IN = 4000. - NL%RRFFACT = 1.000 - NL%GROWTHRESP = 0.333 - NL%LWIDTH_GRASS = 0.05 - NL%LWIDTH_BLTREE = 0.10 - NL%LWIDTH_NLTREE = 0.05 - NL%Q10_C3 = 2.4 - NL%Q10_C4 = 2.4 - !---------------------------------------------------------------------------------------! - ! THETACRIT -- Leaf drought phenology threshold. The sign matters here: ! - ! >= 0. -- This is the relative soil moisture above the wilting point ! - ! below which the drought-deciduous plants will start shedding ! - ! their leaves ! - ! < 0. -- This is the soil potential in MPa below which the drought- ! - ! -deciduous plants will start shedding their leaves. The wilt- ! - ! ing point is by definition -1.5MPa, so make sure that the value ! - ! is above -1.5. ! - !---------------------------------------------------------------------------------------! - NL%THETACRIT = -1.15 - !---------------------------------------------------------------------------------------! - ! QUANTUM_EFFICIENCY_T -- Which quantum yield model should to use for C3 plants ! - ! 0. Original ED-2.1, quantum efficiency is constant. ! - ! 1. Quantum efficiency varies with temperature following ! - ! Ehleringer (1978) polynomial fit. ! - !---------------------------------------------------------------------------------------! - NL%QUANTUM_EFFICIENCY_T = 0 - !---------------------------------------------------------------------------------------! - ! N_PLANT_LIM -- This controls whether plant photosynthesis can be limited by nitrogen. ! - ! 0. No limitation ! - ! 1. 
ED-2.1 nitrogen limitation model. ! - !---------------------------------------------------------------------------------------! - NL%N_PLANT_LIM = 0 - !---------------------------------------------------------------------------------------! - ! N_DECOMP_LIM -- This controls whether decomposition can be limited by nitrogen. ! - ! 0. No limitation ! - ! 1. ED-2.1 nitrogen limitation model. ! - !---------------------------------------------------------------------------------------! - NL%N_DECOMP_LIM = 0 - !---------------------------------------------------------------------------------------! - ! The following parameters adjust the fire disturbance in the model. ! - ! INCLUDE_FIRE -- Which threshold to use for fires. ! - ! 0. No fires; ! - ! 1. (deprecated) Fire will be triggered with enough biomass and ! - ! integrated ground water depth less than a threshold. Based on ! - ! ED-1, the threshold assumes that the soil is 1 m, so deeper ! - ! soils will need to be much drier to allow fires to happen and ! - ! often will never allow fires. ! - ! 2. Fire will be triggered with enough biomass and the total soil ! - ! water at the top 75 cm falls below a threshold. ! - ! FIRE_PARAMETER -- If fire happens, this will control the intensity of the disturbance ! - ! given the amount of fuel (currently the total above-ground ! - ! biomass). ! - ! SM_FIRE -- This is used only when INCLUDE_FIRE = 2. The sign here matters. ! - ! >= 0. - Minimum relative soil moisture above dry air of the top 1m ! - ! that will prevent fires to happen. ! - ! < 0. - Minimum mean soil moisture potential in MPa of the top 1m ! - ! that will prevent fires to happen. The dry air soil ! - ! potential is defined as -3.1 MPa, so make sure SM_FIRE is ! - ! greater than this value. ! - !---------------------------------------------------------------------------------------! - NL%INCLUDE_FIRE = 0 ! 
default is 2 - NL%FIRE_PARAMETER = 0.2 - NL%SM_FIRE = -1.45 - !---------------------------------------------------------------------------------------! - ! IANTH_DISTURB -- This flag controls whether to include anthropogenic disturbances ! - ! such as land clearing, abandonment, and logging. ! - ! 0. no anthropogenic disturbance. ! - ! 1. use anthropogenic disturbance dataset. ! - !---------------------------------------------------------------------------------------! - NL%IANTH_DISTURB = 0 - !---------------------------------------------------------------------------------------! - ! ICANTURB -- This flag controls the canopy roughness. ! - ! 0. Based on Leuning et al. (1995), wind is computed using the similarity ! - ! theory for the top cohort, and they are extinguished with cumulative ! - ! LAI. If using CROWN_MOD 1 or 2, this will use local LAI and average ! - ! by crown area. ! - ! 1. The default ED-2.1 scheme, except that it uses the zero-plane ! - ! displacement height. ! - ! 2. This uses the method of Massman (1997) using constant drag and no ! - ! sheltering factor. ! - ! 3. This is also based on Massman (1997), but with the option of varying ! - ! the drag and sheltering within the canopy. ! - ! 4. Same as 0, but if finds the ground conductance following CLM ! - ! technical note (equations 5.98-5.100). ! - !---------------------------------------------------------------------------------------! - NL%ICANTURB = 1 - !---------------------------------------------------------------------------------------! - ! ISFCLYRM -- Similarity theory model. The model that computes u*, T*, etc... ! - ! 1. BRAMS default, based on Louis (1979). It uses empirical relations to ! - ! estimate the flux based on the bulk Richardson number ! - ! ! - ! All models below use an interative method to find z/L, and the only change ! - ! is the functional form of the psi functions. ! - ! ! - ! 2. Oncley and Dudhia (1995) model, based on MM5. ! - ! 3. 
Beljaars and Holtslag (1991) model. Similar to 2, but it uses an alternative ! - ! method for the stable case that mixes more than the OD95. ! - ! 4. CLM (2004). Similar to 2 and 3, but they have special functions to deal with ! - ! very stable and very unstable cases. ! - !---------------------------------------------------------------------------------------! - NL%ISFCLYRM = 4 ! 3 set by default - !---------------------------------------------------------------------------------------! - ! IED_GRNDVAP -- Methods to find the ground -> canopy conductance. ! - ! 0. Modified Lee Pielke (1992), adding field capacity, but using beta factor ! - ! without the square, like in Noilhan and Planton (1989). This is the closest ! - ! to the original ED-2.0 and LEAF-3, and it is also the recommended one. ! - ! 1. Test # 1 of Mahfouf and Noilhan (1991) ! - ! 2. Test # 2 of Mahfouf and Noilhan (1991) ! - ! 3. Test # 3 of Mahfouf and Noilhan (1991) ! - ! 4. Test # 4 of Mahfouf and Noilhan (1991) ! - ! 5. Combination of test #1 (alpha) and test #2 (soil resistance). ! - ! In all cases the beta term is modified so it approaches zero as soil moisture goes ! - ! to dry air soil. ! - !---------------------------------------------------------------------------------------! - NL%IED_GRNDVAP = 0 - !---------------------------------------------------------------------------------------! - ! The following variables are used to control the similarity theory model. For the ! - ! meaning of these parameters, check Beljaars and Holtslag (1991). ! - ! GAMM -- gamma coefficient for momentum, unstable case (dimensionless) ! - ! Ignored when ISTAR = 1 ! - ! GAMH -- gamma coefficient for heat, unstable case (dimensionless) ! - ! Ignored when ISTAR = 1 ! - ! TPRANDTL -- Turbulent Prandtl number ! - ! Ignored when ISTAR = 1 ! - ! RIBMAX -- maximum bulk Richardson number. ! - ! LEAF_MAXWHC -- Maximum water that can be intercepted by leaves, in kg/m2leaf. !
- !---------------------------------------------------------------------------------------! - NL%GAMM = 13.0 - NL%GAMH = 13.0 - NL%TPRANDTL = 0.74 - NL%RIBMAX = 0.50 - NL%LEAF_MAXWHC = 0.11 - !---------------------------------------------------------------------------------------! - ! IPERCOL -- This controls percolation and infiltration. ! - ! 0. Default method. Assumes soil conductivity constant and for the ! - ! temporary surface water, it sheds liquid in excess of a 1:9 liquid- ! - ! -to-ice ratio through percolation. Temporary surface water exists ! - ! only if the top soil layer is at saturation. ! - ! 1. Constant soil conductivity, and it uses the percolation model as in ! - ! Anderson (1976) NOAA technical report NWS 19. Temporary surface ! - ! water may exist after a heavy rain event, even if the soil doesn't ! - ! saturate. Recommended value. ! - ! 2. Soil conductivity decreases with depth even for constant soil moisture ! - ! , otherwise it is the same as 1. ! - !---------------------------------------------------------------------------------------! - NL%IPERCOL = 1 - !---------------------------------------------------------------------------------------! - ! The following variables control the plant functional types (PFTs) that will be ! - ! used in this simulation. ! - ! ! - ! INCLUDE_THESE_PFT -- a list containing all the PFTs you want to include in this run ! - ! AGRI_STOCK -- which PFT should be used for agriculture ! - ! (used only when IANTH_DISTURB = 1) ! - ! PLANTATION_STOCK -- which PFT should be used for plantation ! - ! (used only when IANTH_DISTURB = 1) ! - ! ! - ! PFT table ! - !---------------------------------------------------------------------------------------! - ! 1 - C4 grass | 9 - early temperate deciduous ! - ! 2 - early tropical | 10 - mid temperate deciduous ! - ! 3 - mid tropical | 11 - late temperate deciduous ! - ! 4 - late tropical | 12:15 - agricultural PFTs ! - ! 5 - temperate C3 grass | 16 - Subtropical C3 grass ! - ! 
6 - northern pines | (C4 grass with C3 photo). ! - ! 7 - southern pines | 17 - "Araucaria" (non-optimised ! - ! 8 - late conifers | Southern Pines). ! - !---------------------------------------------------------------------------------------! - NL%INCLUDE_THESE_PFT = 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17 ! List of PFTs to be included - NL%AGRI_STOCK = 5 ! Agriculture PFT (used only if ianth_disturb=1) - NL%PLANTATION_STOCK = 6 ! Plantation PFT (used only if ianth_disturb=1) - !---------------------------------------------------------------------------------------! - ! PFT_1ST_CHECK -- What to do if the initialisation file has a PFT that is not listed ! - ! in INCLUDE_THESE_PFT (ignored if IED_INIT_MODE is -1 or 0) ! - ! 0. Stop the run ! - ! 1. Add the PFT in the INCLUDE_THESE_PFT list ! - ! 2. Ignore the cohort ! - !---------------------------------------------------------------------------------------! - NL%PFT_1ST_CHECK = 0 - !---------------------------------------------------------------------------------------! - ! The following variables control the size of sub-polygon structures in ED-2. ! - ! MAXSITE -- This is the strict maximum number of sites that each polygon can ! - ! contain. Currently this is used only when the user wants to run ! - ! the same polygon with multiple soil types. If there aren't that ! - ! many different soil types with a minimum area (check MIN_SITE_AREA ! - ! below), then the model will allocate just the amount needed. ! - ! MAXPATCH -- If number of patches in a given site exceeds MAXPATCH, force patch ! - ! fusion. If MAXPATCH is 0, then fusion will never happen. If ! - ! MAXPATCH is negative, then the absolute value is used only during ! - ! the initialization, and fusion will never happen again. Notice ! - ! that if the patches are too different, then the actual number of ! - ! patches in a site may exceed MAXPATCH. ! - ! MAXCOHORT -- If number of cohorts in a given patch exceeds MAXCOHORT, force ! - ! cohort fusion. 
If MAXCOHORT is 0, then fusion will never happen. ! - ! If MAXCOHORT is negative, then the absolute value is used only ! - ! during the initialization, and fusion will never happen again. ! - ! Notice that if the cohorts are too different, then the actual ! - ! number of cohorts in a patch may exceed MAXCOHORT. ! - ! MIN_SITE_AREA -- This is the minimum fraction area of a given soil type that allows ! - ! a site to be created (ignored if IED_INIT_MODE is set to 3). ! - ! MIN_PATCH_AREA -- This is the minimum fraction area of a given soil type that allows ! - ! a site to be created (ignored if IED_INIT_MODE is set to 3). ! - !---------------------------------------------------------------------------------------! - NL%MAXSITE = 6 - NL%MAXPATCH = 30 - NL%MAXCOHORT = 60 - NL%MIN_SITE_AREA = 0.005 - NL%MIN_PATCH_AREA = 0.005 - !---------------------------------------------------------------------------------------! - ! ZROUGH -- constant roughness, in metres, if for all domain ! - !---------------------------------------------------------------------------------------! - NL%ZROUGH = 0.1 - !---------------------------------------------------------------------------------------! - ! Treefall disturbance parameters. ! - ! TREEFALL_DISTURBANCE_RATE -- Sign-dependent treefall disturbance rate: ! - ! > 0. usual disturbance rate, in 1/years; ! - ! = 0. No treefall disturbance; ! - ! < 0. Treefall will be added as a mortality rate (it ! - ! will kill plants, but it won't create a new patch). ! - ! TIME2CANOPY -- Minimum patch age for treefall disturbance to happen. ! - ! If TREEFALL_DISTURBANCE_RATE = 0., this value will be ! - ! ignored. If this value is different than zero, then ! - ! TREEFALL_DISTURBANCE_RATE is internally adjusted so the ! - ! average patch age is still 1/TREEFALL_DISTURBANCE_RATE ! - !---------------------------------------------------------------------------------------! 
- NL%TREEFALL_DISTURBANCE_RATE = 0.0 !0.014 - NL%TIME2CANOPY = 0.0 - !---------------------------------------------------------------------------------------! - ! RUNOFF_TIME -- In case a temporary surface water (TSW) is created, this is the "e- ! - ! -folding lifetime" of the TSW in seconds due to runoff. If you don't ! - ! want runoff to happen, set this to 0. ! - !---------------------------------------------------------------------------------------! - NL%RUNOFF_TIME = 86400.0 - !---------------------------------------------------------------------------------------! - ! The following variables control the minimum values of various velocities in the ! - ! canopy. This is needed to avoid the air to be extremely still, or to avoid singular- ! - ! ities. When defining the values, keep in mind that UBMIN >= UGBMIN >= USTMIN. ! - ! ! - ! UBMIN -- minimum wind speed at the top of the canopy air space [ m/s] ! - ! UGBMIN -- minimum wind speed at the leaf level [ m/s] ! - ! USTMIN -- minimum friction velocity, u*, in m/s. [ m/s] ! - !---------------------------------------------------------------------------------------! - NL%UBMIN = 0.65 - NL%UGBMIN = 0.25 - NL%USTMIN = 0.05 - !---------------------------------------------------------------------------------------! - ! Control parameters for printing to standard output. Any variable can be printed ! - ! to standard output as long as it is one dimensional. Polygon variables have been ! - ! tested, no gaurtantees for other hierarchical levels. Choose any variables that are ! - ! defined in the variable table fill routine in ed_state_vars.f90. Choose the start ! - ! and end index of the polygon,site,patch or cohort. It should work in parallel. The ! - ! indices are global indices of the entire domain. The are printed out in rows of 10 ! - ! columns each. ! - ! ! - ! IPRINTPOLYS -- 0. Do not print information to screen ! - ! 1. Print polygon arrays to screen, use variables described below to ! - ! 
determine which ones and how ! - ! NPVARS -- Number of variables to be printed ! - ! PRINTVARS -- List of variables to be printed ! - ! PFMTSTR -- The standard fortran format for the prints. One format per variable ! - ! IPMIN -- First polygon (absolute index) to be print ! - ! IPMAX -- Last polygon (absolute index) to print ! - !---------------------------------------------------------------------------------------! - NL%IPRINTPOLYS = 0 - NL%NPVARS = 1 - NL%PRINTVARS = 'AVG_PCPG','AVG_CAN_TEMP','AVG_VAPOR_AC','AVG_CAN_SHV' - NL%PFMTSTR = 'f10.8','f5.1','f7.2','f9.5' - NL%IPMIN = 1 - NL%IPMAX = 60 - !---------------------------------------------------------------------------------------! - ! Variables that control the meteorological forcing. ! - ! ! - ! IMETTYPE -- Format of the meteorological dataset ! - ! 0. ASCII (deprecated) ! - ! 1. HDF5 ! - ! ISHUFFLE -- How to choose an year outside the meterorological data range (see ! - ! METCYC1 and METCYCF). ! - ! 0. Sequentially cycle over years ! - ! 1. Randomly pick the years, using the same sequence. This has worked ! - ! with gfortran running in Mac OS X system, but it acts like option 2 ! - ! when running ifort. ! - ! 2. Randomly pick the years, choosing a different sequence each time ! - ! the model is run. ! - ! IMETCYC1 -- First year with meteorological information ! - ! IMETCYCF -- Last year with meteorological information ! - ! IMETAVG -- How the input radiation was originally averaged. You must tell this ! - ! because ED-2.1 can make a interpolation accounting for the cosine of ! - ! zenith angle. ! - ! -1. I don't know, use linear interpolation. ! - ! 0. No average, the values are instantaneous ! - ! 1. Averages ending at the reference time ! - ! 2. Averages beginning at the reference time ! - ! 3. Averages centred at the reference time ! - ! IMETRAD -- What should the model do with the input short wave radiation? ! - ! 0. Nothing, use it as is. ! - ! 1. 
Add them together, then use the SiB method to break radiation down ! - ! into the four components (PAR direct, PAR diffuse, NIR direct, ! - ! NIR diffuse). ! - ! 2. Add then together, then use the method by Weiss and Norman (1985) ! - ! to break radiation down to the four components. ! - ! 3. Gloomy -- All radiation goes to diffuse. ! - ! 4. Sesame street -- all radiation goes to direct, except at night. ! - ! INITIAL_CO2 -- Initial value for CO2 in case no CO2 is provided at the meteorological ! - ! driver dataset [Units: ?mol/mol] ! - !---------------------------------------------------------------------------------------! - NL%IMETTYPE = 1 ! 0 = ASCII, 1 = HDF5 - NL%ISHUFFLE = 2 ! 2. Randomly pick recycled years - NL%METCYC1 = @MET_START@ ! First year of met data - NL%METCYCF = @MET_END@ ! Last year of met data - NL%IMETAVG = @MET_SOURCE@ - NL%IMETRAD = 0 - NL%INITIAL_CO2 = 370.0 ! Initial value for CO2 in case no CO2 is provided at the - ! meteorological driver dataset - !---------------------------------------------------------------------------------------! - ! The following variables control the phenology prescribed from observations: ! - ! ! - ! IPHENYS1 -- First year for spring phenology ! - ! IPHENYSF -- Final year for spring phenology ! - ! IPHENYF1 -- First year for fall/autumn phenology ! - ! IPHENYFF -- Final year for fall/autumn phenology ! - ! PHENPATH -- path and prefix of the prescribed phenology data. ! - ! ! - ! If the years don't cover the entire simulation period, they will be recycled. ! - !---------------------------------------------------------------------------------------! - NL%IPHENYS1 = @PHENOL_START@ - NL%IPHENYSF = @PHENOL_END@ - NL%IPHENYF1 = @PHENOL_START@ - NL%IPHENYFF = @PHENOL_END@ - NL%PHENPATH = '@PHENOL@' - !---------------------------------------------------------------------------------------! - ! These are some additional configuration files. ! - ! IEDCNFGF -- XML file containing additional parameter settings. 
If you don't have ! - ! one, leave it empty ! - ! EVENT_FILE -- file containing specific events that must be incorporated into the ! - ! simulation. ! - ! PHENPATH -- path and prefix of the prescribed phenology data. ! - !---------------------------------------------------------------------------------------! - NL%IEDCNFGF = '@CONFIGFILE@' - NL%EVENT_FILE = 'myevents.xml' - !---------------------------------------------------------------------------------------! - ! Census variables. This is going to create unique census statuses to cohorts, to ! - ! better compare the model with census observations. In case you don't intend to ! - ! compare the model with census data, set up DT_CENSUS to 1., otherwise you may reduce ! - ! cohort fusion. ! - ! DT_CENSUS -- Time between census, in months. Currently the maximum is 60 ! - ! months, to avoid excessive memory allocation. Every time the ! - ! simulation reaches the census time step, all census tags will be ! - ! reset. ! - ! YR1ST_CENSUS -- In which year was the first census conducted? ! - ! MON1ST_CENSUS -- In which month was the first census conducted? ! - ! MIN_RECRUIT_DBH -- Minimum DBH that is measured in the census, in cm. ! - !---------------------------------------------------------------------------------------! - NL%DT_CENSUS = 1 - NL%YR1ST_CENSUS = 1901 - NL%MON1ST_CENSUS = 7 - NL%MIN_RECRUIT_DBH = 10 - !---------------------------------------------------------------------------------------! - ! The following variables are used to control the detailed output for debugging ! - ! purposes. ! - ! ! - ! IDETAILED -- This flag controls the possible detailed outputs, mostly used for ! - ! debugging purposes. Notice that this doesn't replace the normal debug- ! - ! ger options, the idea is to provide detailed output to check bad ! - ! assumptions. The options are additive, and the indices below represent ! - ! the different types of output: ! - ! ! - ! 1 -- Detailed budget (every DTLSM) ! - ! 
2 -- Detailed photosynthesis (every DTLSM) ! - ! 4 -- Detailed output from the integrator (every HDID) ! - ! 8 -- Thermodynamic bounds for sanity check (every DTLSM) ! - ! 16 -- Daily error stats (which variable caused the time step to shrink) ! - ! 32 -- Allometry parameters, and minimum and maximum sizes ! - ! (two files, only at the beginning) ! - ! ! - ! In case you don't want any detailed output (likely for most runs), set ! - ! IDETAILED to zero. In case you want to generate multiple outputs, add ! - ! the number of the sought options: for example, if you want detailed ! - ! photosynthesis and detailed output from the integrator, set IDETAILED ! - ! to 6 (2 + 4). Any combination of the above outputs is acceptable, al- ! - ! though all but the last produce a sheer amount of txt files, in which ! - ! case you may want to look at variable PATCH_KEEP. It is also a good ! - ! idea to set IVEGT_DYNAMICS to 0 when using the first five outputs. ! - ! ! - ! ! - ! PATCH_KEEP -- This option will eliminate all patches except one from the initial- ! - ! isation. This is only used when one of the first five types of ! - ! detailed output is active, otherwise it will be ignored. Options are: ! - ! -2. Keep only the patch with the lowest potential LAI ! - ! -1. Keep only the patch with the highest potential LAI ! - ! 0. Keep all patches. ! - ! > 0. Keep the patch with the provided index. In case the index is ! - ! not valid, the model will crash. ! - !---------------------------------------------------------------------------------------! - NL%IDETAILED = 0 - NL%PATCH_KEEP = 0 - !---------------------------------------------------------------------------------------! - - - !---------------------------------------------------------------------------------------! - ! IOPTINPT -- Optimization configuration. (Currently not used) ! - !---------------------------------------------------------------------------------------! 
- NL%IOPTINPT = '' - !---------------------------------------------------------------------------------------! -$END -!==========================================================================================! -!==========================================================================================! diff --git a/models/ed/inst/ED2IN.r2.2.0 b/models/ed/inst/ED2IN.r2.2.0 new file mode 100644 index 00000000000..14e29ff6d56 --- /dev/null +++ b/models/ed/inst/ED2IN.r2.2.0 @@ -0,0 +1,1997 @@ +!==========================================================================================! +!==========================================================================================! +! ED2IN . ! +! ! +! This is the file that contains the variables that define how ED is to be run. There ! +! is some brief information about the variables here. Some of the variables allow ! +! switching between algorithms; in this case we highlight the status of each ! +! implementation using the following labels: ! +! ! +! ED-2.2 default. ! +! These are the options described in the ED-2.2 technical note (L19) and that have been ! +! thoroughly tested. When unsure, we recommend to use this option. ! +! ! +! ED-2.2 alternative. ! +! These are the options described either in the ED-2.2 or other publications (mostly ! +! X16), and should be fully functional. Depending on the application, this may be the ! +! most appropriate option. ! +! ! +! Legacy. ! +! Older implementations that have been implemented in ED-1.0, ED-2.0, or ED-2.1. These ! +! options are still fully functional and may be the most appropriate option depending ! +! on the question. ! +! ! +! Beta. ! +! Well developed alternative implementations to the ED-2.2 default. These ! +! implementations are nearly complete, but they have not be thoroughly tested. Feel ! +! free to try if you think it is useful, but bear in mind that they may still need some ! +! adjustments. ! +! ! +! Under development. ! +! 
Alternative implementations to the ED-2.2 default, but not yet fully implemented. Do ! +! not use these options unless you are willing to contribute to the development. ! +! ! +! Deprecated. ! +! Older implementations that have shown important limitations. They are included for ! +! back-compatibility but we strongly discourage their use in most cases. ! +! ! +! Non-functional. ! +! Older implementations that have been discontinued or methods not yet implemented. ! +! Do not use these options. ! +! ! +! References: ! +! ! +! Longo M, Knox RG, Medvigy DM, Levine NM, Dietze MC, Kim Y, Swann ALS, Zhang K, ! +! Rollinson CR, Bras RL, Wofsy SC, Moorcroft PR. 2019. The biophysics, ecology, and ! +! biogeochemistry of functionally diverse, vertically and horizontally heterogeneous ! +! ecosystems: the Ecosystem Demography model, version 2.2 ? part 1: Model description. ! +! Geosci. Model Dev. 12: 4309-4346, doi:10.5194/gmd-12-4309-2019 (L19). ! +! ! +! Xu X, Medvigy D, Powers JS, Becknell JM , Guan K. 2016. Diversity in plant hydraulic ! +! traits explains seasonal and inter-annual variations of vegetation dynamics in ! +! seasonally dry tropical forests. New Phytol. 212: 80-95, doi:10.1111/nph.14009 (X16). ! +!------------------------------------------------------------------------------------------! +$ED_NL + + !----- Simulation title (64 characters). -----------------------------------------------! + NL%EXPNME = 'ED2 vGITHUB PEcAn @ENSNAME@' + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! Type of run: ! + ! ! + ! INITIAL -- ED-2 will start a new run. Initial conditions will be set by ! + ! IED_INIT_MODE. The initial conditions can be based on a previous run ! + ! (restart/history), but then it will use only the biomass information as a ! + ! simple initial condition. Energy, water, and CO2 will use standard ! + ! 
initial conditions. ! + ! HISTORY -- ED-2 will resume a simulation from the last history, and every variable ! + ! (including forest structure and thermodynamics) will be assigned based on ! + ! the history file. ! + ! IMPORTANT: this option is intended for continuing interrupted simulations ! + ! (e.g. power outage). We discourage users to select this option and ! + ! provide restart files generated by different commits. ! + !---------------------------------------------------------------------------------------! + NL%RUNTYPE = 'INITIAL' + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! Start of simulation. Information must be given in UTC time. ! + !---------------------------------------------------------------------------------------! + NL%IMONTHA = @START_MONTH@ ! Month + NL%IDATEA = @START_DAY@ ! Day + NL%IYEARA = @START_YEAR@ ! Year + NL%ITIMEA = 0000 ! UTC + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! End of simulation. Information must be given in UTC time. ! + !---------------------------------------------------------------------------------------! + NL%IMONTHZ = @END_MONTH@ ! Month + NL%IDATEZ = @END_DAY@ ! Day + NL%IYEARZ = @END_YEAR@ ! Year + NL%ITIMEZ = 0000 ! UTC + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! DTLSM -- Basic time step [seconds] for photosynthesis, and maximum step for thermo- ! + ! dynamics. Recommended values range from 240 to 900 seconds when using the ! + ! 4th-order Runge Kutta integrator (INTEGRATION_SCHEME=1). We discourage ! + ! 
using the forward Euler scheme (INTEGRATION_SCHEME=0), but in case you ! + ! really want to use it, set the time step to 60 seconds or shorter. ! + ! RADFRQ -- Time step for the canopy radiative transfer model [seconds]. This value ! + ! must be an integer multiple of DTLSM, and we recommend it to be exactly ! + ! the same as DTLSM. ! + !---------------------------------------------------------------------------------------! + NL%DTLSM = 600 + NL%RADFRQ = 600 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! MONTH_YRSTEP -- Month in which the yearly time step (patch dynamics) should occur. ! + !---------------------------------------------------------------------------------------! + NL%MONTH_YRSTEP = 7 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables are used in case the user wants to run a regional run. ! + ! ! + ! N_ED_REGION -- number of regions for which you want to run ED. This can be set to ! + ! zero provided that N_POI is not... ! + ! GRID_TYPE -- which kind of grid to run: ! + ! 0. Longitude/latitude grid ! + ! 1. Polar-stereographic ! + !---------------------------------------------------------------------------------------! + NL%N_ED_REGION = 0 + NL%GRID_TYPE = 0 + + !------------------------------------------------------------------------------------! + ! The following variables are used only when GRID_TYPE is set to 0. You must ! + ! provide one value for each grid, except otherwise noted. ! + ! ! + ! GRID_RES -- Grid resolution, in degrees (first grid only, the other grids ! + ! resolution will be defined by NSTRATX/NSTRATY). ! + ! ED_REG_LATMIN -- Southernmost point of each region. ! + ! ED_REG_LATMAX -- Northernmost point of each region. !
+ ! ED_REG_LONMIN -- Westernmost point of each region. ! + ! ED_REG_LONMAX -- Easternmost point of each region. ! + !------------------------------------------------------------------------------------! + NL%GRID_RES = 1.0 + NL%ED_REG_LATMIN = -12.0, -7.5, 10.0, -6.0 + NL%ED_REG_LATMAX = 1.0, -3.5, 15.0, -1.0 + NL%ED_REG_LONMIN = -66.0,-58.5, 70.0, -63.0 + NL%ED_REG_LONMAX = -49.0,-54.5, 35.0, -53.0 + !------------------------------------------------------------------------------------! + + + + !------------------------------------------------------------------------------------! + ! The following variables are used only when GRID_TYPE is set to 1. ! + ! ! + ! NNXP -- number of points in the X direction. One value for each grid. ! + ! NNYP -- number of points in the Y direction. One value for each grid. ! + ! DELTAX -- grid resolution in the X direction, near the grid pole. Units: [ m]. ! + ! this value is used to define the first grid only, other grids are ! + ! defined using NNSTRATX. ! + ! DELTAY -- grid resolution in the Y direction, near the grid pole. Units: [ m]. ! + ! this value is used to define the first grid only, other grids are ! + ! defined using NNSTRATX. Unless you are running some specific tests, ! + ! both DELTAX and DELTAY should be the same. ! + ! POLELAT -- Latitude of the pole point. Set this close to CENTLAT for a more ! + ! traditional "square" domain. One value for all grids. ! + ! POLELON -- Longitude of the pole point. Set this close to CENTLON for a more ! + ! traditional "square" domain. One value for all grids. ! + ! CENTLAT -- Latitude of the central point. One value for each grid. ! + ! CENTLON -- Longitude of the central point. One value for each grid. ! + !------------------------------------------------------------------------------------! 
+ NL%NNXP = 110 + NL%NNYP = 70 + NL%DELTAX = 60000 + NL%DELTAY = 60000 + NL%POLELAT = -2.857 + NL%POLELON = -54.959 + NL%CENTLAT = -2.857 + NL%CENTLON = -54.959 + !------------------------------------------------------------------------------------! + + + + !------------------------------------------------------------------------------------! + ! Nest ratios. These values are used by both GRID_TYPE=0 and GRID_TYPE=1. ! + ! NSTRATX -- this is will divide the values given by DELTAX or GRID_RES for the ! + ! nested grids. The first value should be always one. ! + ! NSTRATY -- this is will divide the values given by DELTAY or GRID_RES for the ! + ! nested grids. The first value should be always one, and this must ! + ! be always the same as NSTRATX when GRID_TYPE = 0, and this is also ! + ! strongly recommended for when GRID_TYPE = 1. ! + !------------------------------------------------------------------------------------! + NL%NSTRATX = 1,4 + NL%NSTRATY = 1,4 + !------------------------------------------------------------------------------------! + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables are used to define single polygon of interest runs, and ! + ! they are ignored when N_ED_REGION = 0. ! + ! ! + ! N_POI -- number of polygons of interest (POIs). This can be zero as long as ! + ! N_ED_REGION is not. ! + ! POI_LAT -- list of latitudes of each POI. ! + ! POI_LON -- list of longitudes of each POI. ! + ! POI_RES -- grid resolution of each POI (degrees). This is used only to define the ! + ! soil types ! + !---------------------------------------------------------------------------------------! + NL%N_POI = 1 + NL%POI_LAT = @SITE_LAT@ + NL%POI_LON = @SITE_LON@ + NL%POI_RES = 1.00 + !---------------------------------------------------------------------------------------! 
+ + + + !---------------------------------------------------------------------------------------! + ! LOADMETH -- Load balancing method. This is used only in regional runs run in ! + ! parallel. ! + ! 0. Let ED decide the best way of splitting the polygons. Commonest ! + ! option and default. ! + ! 1. One of the methods to split polygons based on their previous ! + ! work load. Developers only. ! + ! 2. Try to load an equal number of SITES per node. Useful for when ! + ! total number of polygons is the same as the total number of cores. ! + ! 3. Another method to split polygons based on their previous work load. ! + ! Developers only. ! + !---------------------------------------------------------------------------------------! + NL%LOADMETH = 0 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! ED2 File output. For all the variables 0 means no output and 3 means HDF5 output. ! + ! ! + ! IFOUTPUT -- Fast analysis. These are mostly polygon-level averages, and the time ! + ! interval between files is determined by FRQANL ! + ! IDOUTPUT -- Daily means (one file per day) ! + ! IMOUTPUT -- Monthly means (one file per month) ! + ! IQOUTPUT -- Monthly means of the diurnal cycle (one file per month). The number ! + ! of points for the diurnal cycle is 86400 / FRQANL ! + ! IYOUTPUT -- Annual output. ! + ! ITOUTPUT -- Instantaneous fluxes, mostly polygon-level variables, one file per year. ! + ! IOOUTPUT -- Observation time output. Equivalent to IFOUTPUT, except only at the ! + ! times specified in OBSTIME_DB. ! + ! ISOUTPUT -- restart file, for HISTORY runs. The time interval between files is ! + ! determined by FRQHIS ! + !---------------------------------------------------------------------------------------!
+ NL%IFOUTPUT = 0 + NL%IDOUTPUT = 0 + NL%IMOUTPUT = 3 + NL%IQOUTPUT = 0 + NL%IYOUTPUT = 0 + NL%ITOUTPUT = 3 + NL%IOOUTPUT = 0 + NL%ISOUTPUT = 3 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables control whether site-, patch-, and cohort-level time ! + ! means and mean sum of squares should be included in the output files or not. ! + ! If these options are on, then they provide much more detailed output, but they may ! + ! add a lot of disk space (especially if you want the fast output to have the detailed ! + ! output). ! + ! ! + ! IADD_SITE_MEANS -- Add site-level averages to the output ! + ! IADD_PATCH_MEANS -- Add patch-level averages to the output ! + ! IADD_COHORT_MEANS -- Add cohort-level averages to the output ! + ! ! + ! The options are additive, and the indices below represent the different types of ! + ! output: ! + ! ! + ! 0 -- No detailed output. ! + ! 1 -- Include the level in monthly output (IMOUTPUT and IQOUTPUT) ! + ! 2 -- Include the level in daily output (IDOUTPUT). ! + ! 4 -- Include the level in sub-daily output (IFOUTPUT and IOOUTPUT). ! + ! ! + ! For example, in case you don't want any cohort output, set IADD_COHORT_MEANS to zero. ! + ! In case you want to generate include cohort means to both daily and monthly outputs, ! + ! but not the sub-daily means, set IADD_COHORT_MEANS to 3 (1 + 2). Any combination of ! + ! the above outputs is acceptable (i.e., any number from 0 to 7). ! + !---------------------------------------------------------------------------------------! + NL%IADD_SITE_MEANS = 1 + NL%IADD_PATCH_MEANS = 1 + NL%IADD_COHORT_MEANS = 1 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! 
ATTACH_METADATA -- Flag for attaching metadata to HDF datasets. Attaching metadata ! + ! will aid new users in quickly identifying dataset descriptions but ! + ! will compromise I/O performance significantly. ! + ! 0 = no metadata, 1 = attach metadata ! + !---------------------------------------------------------------------------------------! + NL%ATTACH_METADATA = 1 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! UNITFAST -- The following variables control the units for FRQFAST/OUTFAST, and ! + ! UNITSTATE FRQSTATE/OUTSTATE, respectively. Possible values are: ! + ! 0. Seconds; ! + ! 1. Days; ! + ! 2. Calendar months (variable) ! + ! 3. Calendar years (variable) ! + ! ! + ! N.B.: 1. In case OUTFAST/OUTSTATE are set to special flags (-1 or -2) ! + ! UNITFAST/UNITSTATE will be ignored for them. ! + ! 2. In case IQOUTPUT is set to 3, then UNITFAST has to be 0. ! + ! ! + !---------------------------------------------------------------------------------------! + NL%UNITFAST = 0 + NL%UNITSTATE = 3 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! OUTFAST/OUTSTATE -- these control the number of times per file. ! + ! 0. Each time gets its own file ! + ! -1. One file per day ! + ! -2. One file per month ! + ! > 0. Multiple timepoints can be recorded to a single file reducing ! + ! the number of files and i/o time in post-processing. ! + ! Multiple timepoints should not be used in the history files ! + ! if you intend to use these for HISTORY runs. ! + !---------------------------------------------------------------------------------------! + NL%OUTFAST = 0 + NL%OUTSTATE = 0 + !---------------------------------------------------------------------------------------! 
+ + + + !---------------------------------------------------------------------------------------! + ! ICLOBBER -- What to do in case the model finds a file that it was supposed the ! + ! written? 0 = stop the run, 1 = overwrite without warning. ! + ! FRQFAST -- time interval between analysis files, units defined by UNITFAST. ! + ! FRQSTATE -- time interval between history files, units defined by UNITSTATE. ! + !---------------------------------------------------------------------------------------! + NL%ICLOBBER = 1 + NL%FRQFAST = 3600. + NL%FRQSTATE = 1. + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! FFILOUT -- Path and prefix for analysis files (all but history/restart). ! + ! SFILOUT -- Path and prefix for history files. ! + !---------------------------------------------------------------------------------------! + NL%FFILOUT = '@FFILOUT@' + NL%SFILOUT = '@SFILOUT@' + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! IED_INIT_MODE -- This controls how the plant community and soil carbon pools are ! + ! initialised. ! + ! ! + ! -1. Start from a true bare ground run, or an absolute desert run. This will ! + ! never grow any plant. ! + ! 0. Start from near-bare ground (only a few seedlings from each PFT to be included ! + ! in this run). ! + ! 1. (Deprecated) This will use history files written by ED-1.0. It will read the ! + ! ecosystem state (like biomass, LAI, plant density, etc.), but it will start ! + ! the thermodynamic state as a new simulation. ! + ! 2. (Deprecated) Same as 1, but it uses history files from ED-2.0 without multiple ! + ! sites, and with the old PFT numbers. ! + ! 3. Same as 1, but using history files from ED-2.0 with multiple sites and ! + ! 
TOPMODEL hydrology. ! + ! 4. Same as 1, but using ED2.1 H5 history/state files that take the form: ! + ! 'dir/prefix-gxx.h5' ! + ! Initialization files MUST end with -gxx.h5 where xx is a two digit integer ! + ! grid number. Each grid has its own initialization file. As an example, if a ! + ! user has two files to initialize their grids with: ! + ! example_file_init-g01.h5 and example_file_init-g02.h5 ! + ! SFILIN = 'example_file_init' ! + ! ! + ! 5. This is similar to option 4, except that you may provide several files ! + ! (including a mix of regional and POI runs, each file ending at a different ! + ! date). This will not check date nor grid structure, it will simply read all ! + ! polygons and match the nearest neighbour to each polygon of your run. SFILIN ! + ! must have the directory common to all history files that are sought to be used,! + ! up to the last character the files have in common. For example if your files ! + ! are ! + ! /mypath/P0001-S-2000-01-01-000000-g01.h5, ! + ! /mypath/P0002-S-1966-01-01-000000-g02.h5, ! + ! ... ! + ! /mypath/P1000-S-1687-01-01-000000-g01.h5: ! + ! SFILIN = '/mypath/P' ! + ! ! + ! 6 - Initialize with ED-2 style files without multiple sites, exactly like option ! + ! 2, except that the PFT types are preserved. ! + ! ! + ! 7. Initialize from a list of both POI and gridded ED2.1 state files, organized ! + ! in the same manner as 5. This method overrides the soil database info and ! + ! takes the soil texture and soil moisture information from the initializing ! + ! ED2.1 state file. It allows for different layering, and assigns via nearest ! + ! neighbor. ! + !---------------------------------------------------------------------------------------! + NL%IED_INIT_MODE = @INIT_MODEL@ + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! EDRES -- Expected input resolution for ED2.0 files. 
This is not used unless ! + ! IED_INIT_MODE = 3. ! + !---------------------------------------------------------------------------------------! + NL%EDRES = 1.0 + !---------------------------------------------------------------------------------------! + + + !---------------------------------------------------------------------------------------! + ! SFILIN -- The meaning and the size of this variable depends on the type of run, set ! + ! at variable RUNTYPE. ! + ! ! + ! 1. INITIAL. Then this is the path+prefix of the previous ecosystem state. This has ! + ! dimension of the number of grids so you can initialize each grid with a ! + ! different dataset. In case only one path+prefix is given, the same will ! + ! be used for every grid. Only some ecosystem variables will be set up ! + ! here, and the initial condition will be in thermodynamic equilibrium. ! + ! ! + ! 2. HISTORY. This is the path+prefix of the history file that will be used. Only the ! + ! path+prefix will be used, as the history for every grid must have come ! + ! from the same simulation. ! + !---------------------------------------------------------------------------------------! + NL%SFILIN = '@SITE_PSSCSS@' + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! History file information. These variables are used to continue a simulation from ! + ! a point other than the beginning. Time must be in UTC. ! + ! ! + ! IMONTHH -- the time of the history file. This is the only place you need to change ! + ! IDATEH dates for a HISTORY run. You may change IMONTHZ and related in case you ! + ! IYEARH want to extend the run, but yo should NOT change IMONTHA and related. ! + ! ITIMEH ! + !---------------------------------------------------------------------------------------! + NL%IYEARH = 2000 ! Year + NL%IMONTHH = 08 ! Month + NL%IDATEH = 01 ! Day + NL%ITIMEH = 0000 ! 
UTC + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! NZG - Number of soil layers. One value for all regions and polygons of interest. ! + ! NZS - Maximum number of snow/water ponding layers. One value for all regions ! + ! and polygons of interest. This is used only when snow is accumulating. ! + ! If only liquid water is standing, the water will be always collapsed ! + ! into a single layer. ! + !---------------------------------------------------------------------------------------! + NL%NZG = 16 + NL%NZS = 4 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! ISOILFLG -- This controls how to initialise soil texture. This must be a list with ! + ! N_ED_REGION+N_POI elements. The first N_ED_REGION elements correspond to ! + ! each gridded domain (from first to last). Elements between N_ED_REGION+1 ! + ! and N_ED_REGION+N_POI correspond to the polygons of interest (from 1 to ! + ! N_POI. Options are: ! + ! 1 -- Read in soil textural class from the files set in SOIL_DATABASE. ! + ! 2 -- Assign either the value set by NSLCON (see below) or define soil ! + ! texture from SLXSAND and SLXCLAY. ! + !---------------------------------------------------------------------------------------! + NL%ISOILFLG = 2 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! NSLCON -- ED-2 Soil classes that the model will use when ISOILFLG is set to 2. ! + ! Possible values are: ! + !---------------------------------------------------------------------------------------! + ! 1 -- sand | 7 -- silty clay loam | 13 -- bedrock ! + ! 
2 -- loamy sand | 8 -- clayey loam | 14 -- silt ! + ! 3 -- sandy loam | 9 -- sandy clay | 15 -- heavy clay ! + ! 4 -- silt loam | 10 -- silty clay | 16 -- clayey sand ! + ! 5 -- loam | 11 -- clay | 17 -- clayey silt ! + ! 6 -- sandy clay loam | 12 -- peat ! + !---------------------------------------------------------------------------------------! + NL%NSLCON = 6 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! ISOILCOL -- LEAF-3 and ED-2 soil colour classes that the model will use when ISOILFLG ! + ! is set to 2. Soil classes are from 1 to 20 (1 = lightest; 20 = darkest). ! + ! The values are the same as CLM-4.0. The table is the albedo for visible ! + ! and near infra-red. ! + !---------------------------------------------------------------------------------------! + ! ! + ! |-----------------------------------------------------------------------| ! + ! | | Dry soil | Saturated | | Dry soil | Saturated | ! + ! | Class |-------------+-------------| Class +-------------+-------------| ! + ! | | VIS | NIR | VIS | NIR | | VIS | NIR | VIS | NIR | ! + ! |-------+------+------+------+------+-------+------+------+------+------| ! + ! | 1 | 0.36 | 0.61 | 0.25 | 0.50 | 11 | 0.24 | 0.37 | 0.13 | 0.26 | ! + ! | 2 | 0.34 | 0.57 | 0.23 | 0.46 | 12 | 0.23 | 0.35 | 0.12 | 0.24 | ! + ! | 3 | 0.32 | 0.53 | 0.21 | 0.42 | 13 | 0.22 | 0.33 | 0.11 | 0.22 | ! + ! | 4 | 0.31 | 0.51 | 0.20 | 0.40 | 14 | 0.20 | 0.31 | 0.10 | 0.20 | ! + ! | 5 | 0.30 | 0.49 | 0.19 | 0.38 | 15 | 0.18 | 0.29 | 0.09 | 0.18 | ! + ! | 6 | 0.29 | 0.48 | 0.18 | 0.36 | 16 | 0.16 | 0.27 | 0.08 | 0.16 | ! + ! | 7 | 0.28 | 0.45 | 0.17 | 0.34 | 17 | 0.14 | 0.25 | 0.07 | 0.14 | ! + ! | 8 | 0.27 | 0.43 | 0.16 | 0.32 | 18 | 0.12 | 0.23 | 0.06 | 0.12 | ! + ! | 9 | 0.26 | 0.41 | 0.15 | 0.30 | 19 | 0.10 | 0.21 | 0.05 | 0.10 | ! + ! 
| 10 | 0.25 | 0.39 | 0.14 | 0.28 | 20 | 0.08 | 0.16 | 0.04 | 0.08 | ! + ! |-----------------------------------------------------------------------| ! + ! ! + ! Soil type 21 is a special case in which we use the albedo method that used to be ! + ! the default in ED-2.1. ! + !---------------------------------------------------------------------------------------! + NL%ISOILCOL = 14 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! These variables are used to define the soil properties when you don't want to use ! + ! the standard soil classes. ! + ! ! + ! SLXCLAY -- Prescribed fraction of clay [0-1] ! + ! SLXSAND -- Prescribed fraction of sand [0-1]. ! + ! ! + ! They are used only when ISOILFLG is 2, both values are between 0. and 1., and ! + ! their sum doesn't exceed 1. In case ISOILFLG is 2 but the fractions do not meet the ! + ! criteria, ED-2 uses NSLCON instead. ! + !---------------------------------------------------------------------------------------! + NL%SLXCLAY = 0.345 + NL%SLXSAND = 0.562 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! Soil grid and initial conditions in case no file is provided. Provide NZG values ! + ! for the following variables (always from deepest to shallowest layer). ! + ! ! + ! SLZ - depth of the bottom of each soil layer [m]. Values must be negative. ! + ! SLMSTR - this is the initial soil moisture, now given as the soil moisture index. ! + ! -1 = dry air soil moisture ! + ! 0 = wilting point ! + ! 1 = field capacity ! + ! 2 = porosity (saturation) ! + ! Values can be fraction, in which case they will be linearly interpolated ! + ! between the special points (e.g. 0.5 will put soil moisture half way ! + ! 
between the wilting point and field capacity). ! + ! STGOFF - initial temperature offset (soil temperature = air temperature + offset) ! + !---------------------------------------------------------------------------------------! + NL%SLZ = -8.000,-7.072,-6.198,-5.380,-4.617,-3.910,-3.259,-2.664,-2.127,-1.648, + -1.228,-0.866,-0.566,-0.326,-0.150,-0.040 + NL%SLMSTR = 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, 1.000, + 1.000, 1.000, 1.000, 1.000, 1.000, 1.000 + NL%STGOFF = 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, + 0.000, 0.000, 0.000, 0.000, 0.000, 0.000 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! Input databases ! + ! VEG_DATABASE -- vegetation database, used only to determine the land/water mask. ! + ! Fill with the path and the prefix. ! + ! SOIL_DATABASE -- soil database, used to determine the soil type. Fill with the ! + ! path and the prefix. ! + ! LU_DATABASE -- land-use change disturbance rates database, used only when ! + ! IANTH_DISTURB is set to 1. Fill with the path and the prefix. ! + ! PLANTATION_FILE -- Character string for the path to the forest plantation fraction ! + ! file. This is used only when IANTH_DISTURB is set to 1 and ! + ! the user wants to simulate forest plantations. Otherwise, leave ! + ! it empty (PLANTATION_FILE='') ! + ! THSUMS_DATABASE -- input directory with dataset to initialise chilling-degree and ! + ! growing-degree days, which is used to drive the cold-deciduous ! + ! phenology (you must always provide this, even when your PFTs are ! + ! not cold deciduous). ! + ! ED_MET_DRIVER_DB -- File containing information for meteorological driver ! + ! instructions (the "header" file). ! + ! OBSTIME_DB -- File containing times of desired IOOUTPUT ! + ! Reference file: /ED/run/obstime_template.time ! + ! 
SOILSTATE_DB -- If ISOILSTATEINIT=1, this variable specifies the full path of ! + ! the file that contains soil moisture and temperature ! + ! information. ! + ! SOILDEPTH_DB -- If ISOILDEPTHFLG=1, this variable specifies the full path of the ! + ! file that contains soil depth information. ! + !---------------------------------------------------------------------------------------! + NL%VEG_DATABASE = '@ED_VEG@' + NL%SOIL_DATABASE = '@ED_SOIL@' + NL%LU_DATABASE = '@ED_LU@' + NL%PLANTATION_FILE = '' + NL%THSUMS_DATABASE = '@ED_THSUM@' + NL%ED_MET_DRIVER_DB = '@SITE_MET@' + NL%OBSTIME_DB = '' !Reference file: /ED/run/obstime_template.time + NL%SOILSTATE_DB = '/mypath/soil_data/temp+moist/STW1996OCT.dat' + NL%SOILDEPTH_DB = '/mypath/soil_data/depth/H250mBRD/H250mBRD_' + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! ISOILSTATEINIT -- Variable controlling how to initialise the soil temperature and ! + ! moisture ! + ! 0. Use SLMSTR and STGOFF. ! + ! 1. Read from SOILSTATE_DB. ! + ! ISOILDEPTHFLG -- Variable controlling how to initialise soil depth ! + ! 0. Constant, always defined by the first (deepest) SLZ layer. ! + ! 1. Read from SOILDEPTH_DB (ED-1.0 style, ascii file). ! + ! 2. Read from SOILDEPTH_DB (ED-2.2 style, hdf5 files + header). ! + !---------------------------------------------------------------------------------------! + NL%ISOILSTATEINIT = 0 + NL%ISOILDEPTHFLG = 0 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! ISOILBC -- This controls the soil moisture boundary condition at the bottom. Choose ! + ! the option according to the site characteristics. ! + ! 0. Flat bedrock. Flux from the bottom of the bottommost layer is zero. ! + ! 1. 
Gravitational flow (free drainage). The flux from the bottom of the ! + ! bottommost layer is due to gradient of height only. ! + ! 2. Lateral drainage. Similar to free drainage, but the gradient is ! + ! reduced by the slope not being completely vertical. The reduction is ! + ! controlled by variable SLDRAIN. In the future options 0, 1, and 2 may ! + ! be combined into a single option. ! + ! 3. Aquifer. Soil moisture of the fictitious layer beneath the bottom is ! + ! always at saturation. ! + !---------------------------------------------------------------------------------------! + NL%ISOILBC = 1 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! SLDRAIN -- This is used only when ISOILBC is set to 2. In this case SLDRAIN is the ! + ! equivalent slope that will slow down drainage. If this is set to zero, ! + ! then lateral drainage reduces to flat bedrock, and if this is set to 90, ! + ! then lateral drainage becomes free drainage. SLDRAIN must be between 0 ! + ! and 90. ! + !---------------------------------------------------------------------------------------! + NL%SLDRAIN = 90. + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! IVEGT_DYNAMICS -- The vegetation dynamics scheme. ! + ! 0. No vegetation dynamics, the initial state will be preserved, ! + ! even though the model will compute the potential values. This ! + ! option is useful for theoretical simulations only. ! + ! 1. Normal ED vegetation dynamics (Moorcroft et al 2001). ! + ! The normal option for almost any simulation. ! + !---------------------------------------------------------------------------------------! 
+ NL%IVEGT_DYNAMICS = 1 + !---------------------------------------------------------------------------------------! + + + !---------------------------------------------------------------------------------------! + ! IBIGLEAF -- Do you want to run ED as a 'big leaf' model? ! + ! 0. No, use the standard size- and age-structure (Moorcroft et al. 2001, ! + ! Ecol. Monogr.). This is the recommended method for most ! + ! applications. ! + ! 1. 'big leaf' ED (Levine et al. 2016, PNAS): this will have no ! + ! horizontal or vertical heterogeneities; 1 patch per PFT and 1 cohort ! + ! per patch; no vertical growth, recruits will 'appear' instantaneously ! + ! at maximum height. ! + ! ! + ! N.B. if you set IBIGLEAF to 1, you MUST turn off the crown model (CROWN_MOD = 0) ! + !---------------------------------------------------------------------------------------! + NL%IBIGLEAF = 0 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! INTEGRATION_SCHEME -- The biophysics integration scheme. ! + ! 0. (Deprecated) Euler step. The fastest, but it has only a ! + ! very crude estimate of time-step errors. ! + ! 1. (ED-2.2 default) Fourth-order Runge-Kutta method. ! + ! 2. (Deprecated) Second-order Runge-Kutta method (Heun's). ! + ! This is not faster than option 1, and it will be eventually ! + ! removed. ! + ! 3. (Under development) Hybrid Stepping (Backward Euler BDF2 ! + ! implicit step for the canopy air and leaf temp, forward ! + ! Euler for else). This has not been thoroughly tested for ! + ! energy, water, and CO2 conservation. ! + !---------------------------------------------------------------------------------------! + NL%INTEGRATION_SCHEME = 1 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! 
+ ! NSUB_EULER -- The number of sub-steps in case we are running forward Euler. The ! + ! maximum time step will then be DTLSM / NSUB_EULER. This is needed to ! + ! make sure we don't take too long steps with Euler, as we cannot ! + ! estimate errors using first-order schemes. This number is ignored ! + ! except when INTEGRATION_SCHEME is 0. ! + !---------------------------------------------------------------------------------------! + NL%NSUB_EULER = 40 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! RK4_TOLERANCE -- This is the relative tolerance for Runge-Kutta or Heun's ! + ! integration. Currently the valid range is between 1.e-7 and 1.e-1, ! + ! but recommended values are between 1.e-4 and 1.e-2. ! + !---------------------------------------------------------------------------------------! + NL%RK4_TOLERANCE = 0.01 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! IBRANCH_THERMO -- This determines whether branches should be included in the ! + ! vegetation thermodynamics and radiation or not. ! + ! 0. (Legacy) No branches in energy/radiation. ! + ! 1. (ED-2.2 default) Branches are accounted in the energy and ! + ! radiation. Branchwood and leaf are treated separately in the ! + ! canopy radiation scheme, but solved as a single pool in the ! + ! biophysics integration. ! + ! 2. (Beta) Similar to 1, but branches are treated as separate pools ! + ! in the biophysics (thus doubling the number of prognostic ! + ! variables). ! + !---------------------------------------------------------------------------------------! + NL%IBRANCH_THERMO = 1 + !---------------------------------------------------------------------------------------! 
+ + + + + !---------------------------------------------------------------------------------------! + ! IPHYSIOL -- This variable will determine the functional form that will control how ! + ! the various parameters will vary with temperature, and how the CO2 ! + ! compensation point for gross photosynthesis (Gamma*) will be found. ! + ! Options are: ! + ! ! + ! 0 -- (Legacy) Original ED-2.1, we use the "Arrhenius" function as in Foley et al. ! + ! (1996, Global Biogeochem. Cycles) and Moorcroft et al. (2001, Ecol. Monogr.). ! + ! Gamma* is found using the parameters for tau as in Foley et al. (1996). This ! + ! option causes optimal temperature to be quite low, even in the tropics (Rogers ! + ! et al. 2017, New Phytol.). ! + ! 1 -- (Beta) Similar to case 0, but we use Jmax to determine the RubP-regeneration ! + ! (aka light) limitation case, account for the triose phosphate utilisation ! + ! limitation case (C3), and use the Michaelis-Mentel coefficients along with other ! + ! parameters from von Caemmerer (2000, Biochemical models of leaf photosynthesis). ! + ! 2 -- (ED-2.2 default) Collatz et al. (1991, Agric. For. Meteorol.). We use the power ! + ! (Q10) equations, with Collatz et al. (1991) parameters for compensation point, ! + ! and the Michaelis-Mentel coefficients. The correction for high and low ! + ! temperatures are the same as in Moorcroft et al. (2001). ! + ! 3 -- (Beta) Similar to case 2, but we use Jmax to determine the RubP-regeneration ! + ! (aka light) limitation case, account for the triose phosphate utilisation ! + ! limitation case (C3), and use the Michaelis-Mentel coefficients along with other ! + ! parameters from von Caemmerer (2000). ! + ! 4 -- (Beta) Use "Arrhenius" function as in Harley et al. (1991). This must be run ! + ! with ISTOMATA_SCHEME = 1 ! + !---------------------------------------------------------------------------------------! 
+ NL%IPHYSIOL = 2 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! IALLOM -- Which allometry to use (this mostly affects tropical PFTs). Temperate PFTs ! + ! will use the new root allometry and the maximum crown area unless IALLOM is ! + ! set to 0). ! + ! 0. (Legacy) Original ED-1.0, included for back compatibility. ! + ! 1. (Legacy) ! + ! a. The coefficients for structural biomass are set so the total AGB ! + ! is similar to Baker et al. (2004, Glob. Change Biol.), equation 2. ! + ! b. Experimental root depth that makes canopy trees to have root depths ! + ! of 5m and grasses/seedlings at 0.5 to have root depth of 0.5 m. ! + ! c. Crown area defined as in Poorter et al. (2006, Ecology), imposing ! + ! maximum crown area. ! + ! 2. (ED-2.2 default) Similar to 1, but with a few extra changes. ! + ! a. Height -> DBH allometry as in Poorter et al. (2006) ! + ! b. Balive is retuned, using a few leaf biomass allometric equations for ! + ! a few genera in Costa Rica. References: ! + ! Cole and Ewel (2006, Forest Ecol. Manag.), and Calvo-Alvarado et al. ! + ! (2008, Tree Physiol.). ! + ! 3. (Beta) Updated allometric for tropical PFTs based on data from ! + ! Sustainable Landscapes Brazil (Height and crown area), Chave et al. ! + ! (2014, Glob. Change Biol.) (biomass) and the BAAD data base, Falster et ! + ! al. (2015, Ecology) (leaf area). Both leaf and structural biomass take ! + ! DBH and Height as dependent variables, and DBH-Height takes a simpler ! + ! log-linear form fitted using SMA so it can be inverted (useful for ! + ! airborne lidar initialisation). ! + !---------------------------------------------------------------------------------------! + NL%IALLOM = 3 + !---------------------------------------------------------------------------------------! 
+ + + + !---------------------------------------------------------------------------------------! + ! ECONOMICS_SCHEME -- Temporary variable for testing the relationship amongst traits in ! + ! the tropics. ! + ! 0. (ED-2.2 default) ED-2.1 standard, based on Reich et al. (1997, ! + ! PNAS), Moorcroft et al. (2001, Ecol. Monogr.) and some updates ! + ! following Kim et al. (2012, Glob. Change Biol.). ! + ! 1. When available, trait relationships were derived from more ! + ! up-to-date data sets, including the TRY database (Kattge et ! + ! al. 2011, Glob. Change Biol.), NGEE-Tropics (Norby et al. ! + ! 2017, New Phytol.), RAINFOR (Bahar et al. 2017, New Phytol.), ! + ! and GLOPNET (Wright et al. 2004, Nature). Check ! + ! ed_params.f90 for details. ! + !---------------------------------------------------------------------------------------! + NL%ECONOMICS_SCHEME = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! IGRASS -- This controls the dynamics and growth calculation for grasses. ! + ! ! + ! 0. (Legacy) Original ED-1/ED-2.0 method, grasses are miniature trees, grasses have ! + ! heartwood biomass (albeit small), and growth happens monthly. ! + ! 1. (ED-2.2 default). Heartwood biomass is always 0, height is a function of leaf ! + ! biomass , and growth happens daily. With this option, grasses are evergreen ! + ! regardless of IPHEN_SCHEME. ! + !---------------------------------------------------------------------------------------! + NL%IGRASS = 1 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! IPHEN_SCHEME -- It controls the phenology scheme. Even within each scheme, the ! + ! actual phenology will be different depending on the PFT. ! + ! ! + ! 
-1: (ED-2.2 default for evergreen tropical). ! + ! grasses - evergreen; ! + ! tropical - evergreen; ! + ! conifers - evergreen; ! + ! hardwoods - cold-deciduous (Botta et al.); ! + ! ! + ! 0: (Deprecated). ! + ! grasses - drought-deciduous (old scheme); ! + ! tropical - drought-deciduous (old scheme); ! + ! conifers - evergreen; ! + ! hardwoods - cold-deciduous; ! + ! ! + ! 1: (ED-2.2 default for prescribed phenology; deprecated for tropical PFTs). ! + ! phenology is prescribed for cold-deciduous broadleaf trees. ! + ! ! + ! 2: (ED-2.2 default). ! + ! grasses - drought-deciduous (new scheme); ! + ! tropical - drought-deciduous (new scheme); ! + ! conifers - evergreen; ! + ! hardwoods - cold-deciduous; ! + ! ! + ! 3: (Beta). ! + ! grasses - drought-deciduous (new scheme); ! + ! tropical - drought-deciduous (light phenology); ! + ! conifers - evergreen; ! + ! hardwoods - cold-deciduous; ! + ! ! + ! Old scheme: plants shed their leaves once instantaneous amount of available water ! + ! becomes less than a critical value. ! + ! New scheme: plants shed their leaves once a 10-day running average of available ! + ! water becomes less than a critical value. ! + !---------------------------------------------------------------------------------------! + NL%IPHEN_SCHEME = @PHENOL_SCHEME@ + !---------------------------------------------------------------------------------------! + + + !---------------------------------------------------------------------------------------! + ! Parameters that control the phenology response to radiation, used only when ! + ! IPHEN_SCHEME = 3. ! + ! ! + ! RADINT -- Intercept ! + ! RADSLP -- Slope. ! + !---------------------------------------------------------------------------------------! + NL%RADINT = -11.3868 + NL%RADSLP = 0.0824 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! 
REPRO_SCHEME -- This controls plant reproduction and dispersal. ! + ! 0. Reproduction off. Useful for very short runs only. ! + ! 1. (Legacy) Original reproduction scheme. Seeds are exchanged ! + ! between patches belonging to the same site, but they can't go ! + ! outside their original site. ! + ! 2. (ED-2.2 default) Similar to 1, but seeds are exchanged between ! + ! patches belonging to the same polygon, even if they are in ! + ! different sites. They can't go outside their original polygon, ! + ! though. This is the same as option 1 if there is only one site ! + ! per polygon. ! + ! 3. (Beta) Similar to 2, but allocation to reproduction may be set as ! + ! a function of height using an asymptotic curve. ! + !---------------------------------------------------------------------------------------! + NL%REPRO_SCHEME = 2 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! LAPSE_SCHEME -- This specifies the met lapse rate scheme: ! + ! 0. (ED-2.2 default) No lapse rates ! + ! 1. (Beta) Phenomenological, global ! + ! 2. (Non-functional) Phenomenological, local ! + ! 3. (Non-functional) Mechanistic ! + !---------------------------------------------------------------------------------------! + NL%LAPSE_SCHEME = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! CROWN_MOD -- Specifies how tree crowns are represented in the canopy radiation model, ! + ! and in the turbulence scheme depending on ICANTURB. ! + ! 0. (ED-2.2 default) Flat-top, infinitesimally thin crowns. ! + ! 1. (Under development) Finite radius mixing model (Dietze). ! + ! This is only implemented for direct radiation with ICANRAD=0. ! 
+ !---------------------------------------------------------------------------------------! + NL%CROWN_MOD = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables control the canopy radiation solver. ! + ! ! + ! ICANRAD -- Specifies how canopy radiation is solved. This variable sets both ! + ! shortwave and longwave. ! + ! 0. (Deprecated) original two-stream model from Medvigy (2006), with ! + ! the possibility to apply finite crown area to direct shortwave ! + ! radiation. This option is no longer supported and may be removed ! + ! in future releases. ! + ! 1. (Deprecated) Multiple scattering model from Zhao and Qualls (2005, ! + ! 2006, Water Resour. Res.). This option is no longer supported and ! + ! may be removed in future releases. ! + ! 2. (ED-2.2 default) Updated two-stream model from Liou (2002, An ! + ! introduction to atmospheric radiation). ! + ! IHRZRAD -- Specifies how horizontal canopy radiation is solved. ! + ! 0. (ED-2.2 default) No horizontal patch shading. All patches ! + ! receive the same amount of light at the top. ! + ! 1. (Beta) A realized map of the plant community is built by randomly ! + ! assigning gaps associated with gaps (number of gaps proportional ! + ! to the patch area), and populating them with individuals, ! + ! respecting the cohort distribution in each patch. The crown ! + ! closure index is calculated for the entire landscape and used ! + ! to change the amount of direct light reaching the top of the ! + ! canopy. Patches are then split into 1-3 patches based on the ! + ! light condition, so expect simulations to be slower. (Morton et ! + ! al., in review). ! + ! 2. (Beta) Similar to option 1, except that height for trees with ! + ! DBH > DBH_crit are rescaled to calculate CCI. ! + ! 3. (Beta) This creates patches following IHRZRAD = 1, but then ! + ! 
assumes that the light scaling factor is 1 for all patches. This ! + ! is only useful to isolate the effect of heterogeneous ! + ! illumination from the patch count. ! + ! 4. (Beta) Similar to option 3, but it applies the same method as ! + ! IHRZRAD=2. ! + !---------------------------------------------------------------------------------------! + NL%ICANRAD = 2 + NL%IHRZRAD = 0 + !---------------------------------------------------------------------------------------! + + + !---------------------------------------------------------------------------------------! + ! The variables below will be eventually removed from ED2IN, use XML initialisation ! + ! file to set these parameters instead. ! + ! LTRANS_VIS -- Leaf transmittance for tropical plants - Visible/PAR ! + ! LTRANS_NIR -- Leaf transmittance for tropical plants - Near Infrared ! + ! LREFLECT_VIS -- Leaf reflectance for tropical plants - Visible/PAR ! + ! LREFLECT_NIR -- Leaf reflectance for tropical plants - Near Infrared ! + ! ORIENT_TREE -- Leaf orientation factor for tropical trees. Extremes are: ! + ! -1. All leaves are oriented in the vertical ! + ! 0. Leaf orientation is perfectly random ! + ! 1. All leaves are oriented in the horizontal ! + ! In practice, acceptable values range from -0.4 and 0.6 ! + ! (Goudriaan, 1977). ! + ! ORIENT_GRASS -- Leaf orientation factor for tropical grasses. Extremes are: ! + ! -1. All leaves are oriented in the vertical ! + ! 0. Leaf orientation is perfectly random ! + ! 1. All leaves are oriented in the horizontal ! + ! In practice, acceptable values range from -0.4 and 0.6 ! + ! (Goudriaan, 1977). ! + ! CLUMP_TREE -- Clumping factor for tropical trees. Extremes are: ! + ! lim -> 0. Black hole (0 itself is unacceptable) ! + ! 1. Homogeneously spread over the layer (i.e., no clumping) ! + ! CLUMP_GRASS -- Clumping factor for tropical grasses. Extremes are: ! + ! lim -> 0. Black hole (0 itself is unacceptable) ! + ! 1. 
Homogeneously spread over the layer (i.e., no clumping) ! + !---------------------------------------------------------------------------------------! + NL%LTRANS_VIS = 0.05 + NL%LTRANS_NIR = 0.2 + NL%LREFLECT_VIS = 0.1 + NL%LREFLECT_NIR = 0.4 + NL%ORIENT_TREE = 0.1 + NL%ORIENT_GRASS = 0.0 + NL%CLUMP_TREE = 0.8 + NL%CLUMP_GRASS = 1.0 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! IGOUTPUT -- In case IHRZRAD is not zero, should the model write the patch table and ! + ! gap realisation files? (0 -- no; 1 -- yes). Note these files are still ! + ! in text files so they may take considerable disk space. ! + ! GFILOUT -- Prefix for the output patch table/gap files. ! + !---------------------------------------------------------------------------------------! + NL%IGOUTPUT = 0 + NL%GFILOUT = '/mypath/generic-prefix' + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! DECOMP_SCHEME -- This specifies the soil Carbon (decomposition) model. ! + ! ! + ! 0 - (Deprecated) ED-2.0 default. Exponential with low-temperature limitation only. ! + ! This option is known to cause excessive accumulation of soil carbon in the ! + ! tropics. ! + ! 1 - (Beta) Lloyd and Taylor (1994, Funct. Ecol.) model. Additional parameters must be ! + ! set in an XML file. ! + ! 2 - (ED-2.2 default) Similar to ED-1.0 and CENTURY model, heterotrophic respiration ! + ! reaches a maximum at around 38C (using the default parameters), then quickly ! + ! falls to zero at around 50C. It applies a similar function for soil moisture, ! + ! which allows higher decomposition rates when it is close to the optimal, plummet- ! + ! ting when it is almost saturated. ! + ! 
3 - (Beta) Similar to option 0, but it uses an empirical moisture limit equation ! + ! from Moyano et al. (2012), Biogeosciences. ! + ! 4 - (Beta) Similar to option 1, but it uses an empirical moisture limit equation ! + ! from Moyano et al. (2012), Biogeosciences. ! + ! 5 - (Beta) Based on Bolker et al. (1998, Ecol. Appl.) CENTURY model. Five necromass ! + ! pools (litter aka fast, structural, microbial, humified aka slow, and passive). ! + ! Temperature and moisture functions are the same as 2. ! + !---------------------------------------------------------------------------------------! + NL%DECOMP_SCHEME = 2 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! H2O_PLANT_LIM -- this determines whether plant photosynthesis can be limited by ! + ! soil moisture, the FSW, defined as FSW = Supply / (Demand + Supply). ! + ! ! + ! Demand is always the transpiration rates in case soil moisture is not limiting (the ! + ! psi_0 term times LAI). The supply is determined by ! + ! ! + ! Kw * nplant * Broot * Available_Water, ! + ! ! + ! and the definition of available water changes depending on H2O_PLANT_LIM: ! + ! 0. Force FSW = 1 (effectively available water is infinity). ! + ! 1. (Legacy) Available water is the total soil water above wilting point, integrated ! + ! across all layers within the rooting zone. ! + ! 2. (ED-2.2 default) Available water is the soil water at field capacity minus wilt- ! + ! ing point, scaled by the so-called wilting factor: ! + ! ! + ! (psi(k) - (H - z(k)) - psi_wp) / (psi_fc - psi_wp) ! + ! ! + ! where psi is the matric potential at layer k, z is the layer depth, H is the ! + ! crown height and psi_fc and psi_wp are the matric potentials at wilting point ! + ! and field capacity. ! + ! 3. (Beta) Use leaf water potential to modify fsw following Powell et al. (2017). ! + ! 
This setting requires PLANT_HYDRO_SCHEME to be non-zero. ! + ! 4. (Beta) Use leaf water potential to modify the optimization-based stomatal model ! + ! following Xu et al. (2016). This setting requires PLANT_HYDRO_SCHEME to be ! + ! non-zero values and set ISTOMATA_SCHEME to 1. ! + ! 5. (Beta) Similar to 2, but the water supply directly affects gsw, as opposed to ! + ! fsw. This is done by making D0 a function of soil moisture. Note that this ! + ! still uses Kw but Kw must be significantly lower, at least for tropical trees ! + ! (1/15 - 1/10 of the original). This works only with PLANT_HYDRO_SCHEME set to 0. ! + !---------------------------------------------------------------------------------------! + NL%H2O_PLANT_LIM = 2 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! PLANT_HYDRO_SCHEME -- Flag to set dynamic plant hydraulics. ! + ! 0 - (ED-2.2 default) No dynamic hydraulics (leaf and wood are always saturated). ! + ! 1 - (ED-2.2 alternative) Track plant hydrodynamics. Model framework from X16, using ! + ! parameters from C16. ! + ! 2 - (Deprecated) Track plant hydrodynamics. Model framework from X16, using ! + ! parameters from X16. ! + ! ! + ! References: ! + ! ! + ! Christoffersen BO, Gloor M, Fauset S, Fyllas NM, Galbraith DR, Baker TR, Kruijt B, ! + ! Rowland L, Fisher RA, Binks OJ et al. 2016. Linking hydraulic traits to tropical ! + ! forest function in a size- structured and trait-driven model (TFS v.1-Hydro). ! + ! Geosci. Model Dev., 9: 4227-4255. doi:10.5194/gmd- 9-4227-2016 (C16). ! + ! ! + ! Xu X, Medvigy D, Powers JS, Becknell JM , Guan K. 2016. Diversity in plant hydraulic ! + ! traits explains seasonal and inter-annual variations of vegetation dynamics in ! + ! seasonally dry tropical forests. New Phytol., 212: 80-95. doi:10.1111/nph.14009 ! + ! (X16). ! 
+ !---------------------------------------------------------------------------------------! + NL%PLANT_HYDRO_SCHEME = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! ISTRUCT_GROWTH_SCHEME -- Different methods to perform structural growth. ! + ! 0. (ED-2.2 default) Use all bstorage allocation to growth to increase heartwood. ! + ! This option will be eventually deprecated, as it creates problems for drought- ! + ! deciduous plants and for allometric settings that properly calculate sapwood ! + ! (IALLOM = 3). ! + ! 1. (ED-2.2 alternative) Correct the fraction of storage allocated to heartwood, so ! + ! storage has sufficient carbon to increment all living tissues in the upcoming ! + ! month. This option will eventually become the default. ! + !---------------------------------------------------------------------------------------! + NL%ISTRUCT_GROWTH_SCHEME = 1 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! ISTOMATA_SCHEME -- Which stomatal conductance model to use. ! + ! 0. (ED-2.2 default) Leuning (L95) model. ! + ! 1. (Beta) Katul's optimization-based model (see X16) ! + ! ! + ! References: ! + ! ! + ! Leuning R. 1995. A critical appraisal of a combined stomatal-photosynthesis model for ! + ! C3 plants. Plant Cell Environ., 18: 339-355. ! + ! doi:10.1111/j.1365-3040.1995.tb00370.x (L95). ! + ! ! + ! Xu X, Medvigy D, Powers JS, Becknell JM , Guan K. 2016. Diversity in plant hydraulic ! + ! traits explains seasonal and inter-annual variations of vegetation dynamics in ! + ! seasonally dry tropical forests. New Phytol., 212: 80-95. doi:10.1111/nph.14009 ! + ! (X16). ! + !---------------------------------------------------------------------------------------! 
+ NL%ISTOMATA_SCHEME = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! TRAIT_PLASTICITY_SCHEME -- Whether/How plant traits vary with local environment. ! + ! ! + ! 0 - (ED-2.2 default) No trait plasticity. Trait parameters for each PFT are fixed. ! + ! 1 - (Beta) Vm0, SLA and leaf turnover rate change annually with cohort light ! + ! environment. The parametrisation is based on Lloyd et al. (2010, ! + ! Biogeosciences), with additional data from Keenan and Niinemets (2016, Nat. ! + ! Plants) and Russo and Kitajima (2016, Tropical Tree Physiology book). For each ! + ! cohort, Vm0 and leaf turnover rates decrease, and SLA increases with shading. ! + ! The magnitude of changes is calculated using overtopping LAI and corresponding ! + ! extinction factors for each trait. This is not applicable to grass PFTs. ! + ! (Xu et al. in prep.) ! + ! 2 - (Beta) Similar to 1, but traits are updated monthly. ! + ! -1 - (Beta) Similar to 1, but use height to adjust SLA. ! + ! -2 - (Beta) Similar to 2, but use height to adjust SLA. ! + !---------------------------------------------------------------------------------------! + NL%TRAIT_PLASTICITY_SCHEME = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! IDDMORT_SCHEME -- This flag determines whether storage should be accounted in the ! + ! carbon balance. ! + ! 0 -- (Legacy) Carbon balance is done in terms of fluxes only. ! + ! 1 -- (ED-2.2 default) Carbon balance is offset by the storage ! + ! pool. Plants will be in negative carbon balance only when ! + ! they run out of storage and are still losing more carbon than ! + ! gaining. ! + ! ! + ! CBR_SCHEME -- This flag determines which carbon stress scheme is used: ! + ! 
0 -- (ED-2.2 default) Single stress. CBR = cb/cb_mlmax ! + ! cb_mlmax is the carbon balance in full sun and no moisture ! + ! limitation ! + ! 1 -- (Legacy) Co-limitation from light and moisture (Longo et al. ! + ! 2018, New Phytol.). CBR_LIGHT = cb/cb_lightmax and ! + ! CBR_MOIST = cb/cb_moistmax. CBR_LIGHT and CBR_MOIST are then ! + ! weighted according to DDMORT_CONST (below) ! + ! 2 -- (Beta) Leibig Style, i.e. limitation from either light or ! + ! moisture depending on which is lower at a given point in time ! + ! CBR = cb/max(cb_lightmax, cb_moistmax) ! + ! ! + ! DDMORT_CONST -- CBR_Scheme = 1 only ! + ! This constant (k) determines the relative contribution of light ! + ! and soil moisture to the density-dependent mortality rate. Values ! + ! range from 0 (soil moisture only) to 1 (light only, which is the ! + ! ED-1.0 and ED-2.0 default). ! + ! ! + ! mort1 ! + ! mu_DD = ------------------------- ! + ! 1 + exp [ mort2 * CBR ] ! + ! ! + ! 1 DDMORT_CONST 1 - DDMORT_CONST ! + ! ------------ = ------------------ + ------------------ ! + ! CBR - CBR_SS CBR_LIGHT - CBR_SS CBR_MOIST - CBR_SS ! + ! ! + !---------------------------------------------------------------------------------------! + NL%IDDMORT_SCHEME = 1 + NL%CBR_SCHEME = 0 + NL%DDMORT_CONST = 0.8 + !---------------------------------------------------------------------------------------! + + + !---------------------------------------------------------------------------------------! + ! These variables will be eventually removed from ED2IN, use the XML initialisation ! + ! file to set these parameters Vm0 instead. The following variables are factors that ! + ! control photosynthesis and respiration. Note that some of them are relative values, ! + ! whereas others are absolute. ! + ! ! + ! VMFACT_C3 -- Factor multiplying the default Vm0 for C3 plants (1.0 = default). ! + ! VMFACT_C4 -- Factor multiplying the default Vm0 for C4 plants (1.0 = default). ! + ! 
MPHOTO_TRC3 -- Stomatal slope (M) for tropical C3 plants ! + ! MPHOTO_TEC3 -- Stomatal slope (M) for conifers and temperate C3 plants ! + ! MPHOTO_C4 -- Stomatal slope (M) for C4 plants. ! + ! BPHOTO_BLC3 -- cuticular conductance for broadleaf C3 plants [umol/m2/s] ! + ! BPHOTO_NLC3 -- cuticular conductance for needleleaf C3 plants [umol/m2/s] ! + ! BPHOTO_C4 -- cuticular conductance for C4 plants [umol/m2/s] ! + ! KW_GRASS -- Water conductance for grasses, in m2/yr/kgC_root. This is used only ! + ! when H2O_PLANT_LIM is not 0. ! + ! KW_TREE -- Water conductance for trees, in m2/yr/kgC_root. This is used only ! + ! when H2O_PLANT_LIM is not 0. ! + ! GAMMA_C3 -- The dark respiration factor (gamma) for C3 plants. In case this ! + ! number is set to 0, find the factor based on Atkin et al. (2015). ! + ! GAMMA_C4 -- The dark respiration factor (gamma) for C4 plants. In case this ! + ! number is set to 0, find the factor based on Atkin et al. (2015). ! + ! (assumed to be twice as large as C3 grasses, as Atkin et al. 2015 ! + ! did not estimate Rd0 for C4 grasses). ! + ! D0_GRASS -- The transpiration control in gsw (D0) for ALL grasses. ! + ! D0_TREE -- The transpiration control in gsw (D0) for ALL trees. ! + ! ALPHA_C3 -- Quantum yield of ALL C3 plants. This is only applied when ! + ! QUANTUM_EFFICIENCY_T = 0. ! + ! ALPHA_C4 -- Quantum yield of C4 plants. This is always applied. ! + ! KLOWCO2IN -- The coefficient that controls the PEP carboxylase limited rate of ! + ! carboxylation for C4 plants. ! + ! RRFFACT -- Factor multiplying the root respiration factor for ALL PFTs. ! + ! (1.0 = default). ! + ! GROWTHRESP -- The actual growth respiration factor (C3/C4 tropical PFTs only). ! + ! (1.0 = default). ! + ! LWIDTH_GRASS -- Leaf width for grasses, in metres. This controls the leaf boundary ! + ! layer conductance (gbh and gbw). ! + ! LWIDTH_BLTREE -- Leaf width for trees, in metres. This controls the leaf boundary ! + ! layer conductance (gbh and gbw). 
This is applied to broadleaf trees ! + ! only. ! + ! LWIDTH_NLTREE -- Leaf width for trees, in metres. This controls the leaf boundary ! + ! layer conductance (gbh and gbw). This is applied to conifer trees ! + ! only. ! + ! Q10_C3 -- Q10 factor for C3 plants (used only if IPHYSIOL is set to 2 or 3). ! + ! Q10_C4 -- Q10 factor for C4 plants (used only if IPHYSIOL is set to 2 or 3). ! + !---------------------------------------------------------------------------------------! + NL%VMFACT_C3 = 1.00 + NL%VMFACT_C4 = 1.00 + NL%MPHOTO_TRC3 = 9.0 + NL%MPHOTO_TEC3 = 7.2 + NL%MPHOTO_C4 = 5.2 + NL%BPHOTO_BLC3 = 10000. + NL%BPHOTO_NLC3 = 1000. + NL%BPHOTO_C4 = 10000. + NL%KW_GRASS = 900. + NL%KW_TREE = 600. + NL%GAMMA_C3 = 0.0145 + NL%GAMMA_C4 = 0.035 + NL%D0_GRASS = 0.016 + NL%D0_TREE = 0.016 + NL%ALPHA_C3 = 0.08 + NL%ALPHA_C4 = 0.055 + NL%KLOWCO2IN = 17949. + NL%RRFFACT = 1.000 + NL%GROWTHRESP = 0.333 + NL%LWIDTH_GRASS = 0.05 + NL%LWIDTH_BLTREE = 0.10 + NL%LWIDTH_NLTREE = 0.05 + NL%Q10_C3 = 2.4 + NL%Q10_C4 = 2.4 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! THETACRIT -- Leaf drought phenology threshold. The sign matters here: ! + ! >= 0. -- This is the relative soil moisture above the wilting point ! + ! below which the drought-deciduous plants will start shedding ! + ! their leaves ! + ! < 0. -- This is the soil potential in MPa below which the drought- ! + ! -deciduous plants will start shedding their leaves. The wilt- ! + ! ing point is by definition -1.5MPa, so make sure that the value ! + ! is above -1.5. ! + !---------------------------------------------------------------------------------------! + NL%THETACRIT = -1.20 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! 
QUANTUM_EFFICIENCY_T -- Which quantum yield model should to use for C3 plants ! + ! 0. (ED-2.2 default) Quantum efficiency is constant. ! + ! 1. (Beta) Quantum efficiency varies with temperature ! + ! following Ehleringer (1978, Oecologia) polynomial fit. ! + !---------------------------------------------------------------------------------------! + NL%QUANTUM_EFFICIENCY_T = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! N_PLANT_LIM -- This controls whether plant photosynthesis can be limited by nitrogen. ! + ! 0. No limitation ! + ! 1. Activate nitrogen limitation model. As of ED-2.2, this option has ! + ! not been thoroughly tested in the tropics. ! + !---------------------------------------------------------------------------------------! + NL%N_PLANT_LIM = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! N_DECOMP_LIM -- This controls whether decomposition can be limited by nitrogen. ! + ! 0. No limitation ! + ! 1. Activate nitrogen limitation model. As of ED-2.2, this option has ! + ! not been thoroughly tested in the tropics. ! + !---------------------------------------------------------------------------------------! + NL%N_DECOMP_LIM = 0 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! The following parameters adjust the fire disturbance in the model. ! + ! INCLUDE_FIRE -- Which threshold to use for fires. ! + ! 0. No fires; ! + ! 1. (deprecated) Fire will be triggered with enough fuel (assumed ! + ! to be total above-ground biomass) and integrated ground water ! + ! depth less than a threshold. 
Based on ED-1, the threshold ! + ! assumes that the soil is 1 m, so deeper soils will need to be ! + ! much drier to allow fires to happen. ! + ! 2. (ED-2.2 default) Fire will be triggered with enough biomass and ! + ! the total soil water at the top 50 cm falls below a threshold. ! + ! 3. (Under development) This will eventually become SPITFIRE and/or ! + ! HESFIRE. Currently this is similar to 2, except that fuel is ! + ! defined as above-ground litter and coarse woody debris, ! + ! grasses, and trees shorter than 2 m. Ignitions are currently ! + ! restricted to areas with human presence (i.e. any non-natural ! + ! patch). ! + ! FIRE_PARAMETER -- If fire happens, this will control the intensity of the disturbance ! + ! given the amount of fuel. ! + ! SM_FIRE -- This is used only when INCLUDE_FIRE = 2 or 3, and it has different ! + ! meanings. The sign here matters. ! + ! When INCLUDE_FIRE = 2: ! + ! >= 0. - Minimum relative soil moisture above dry air of the top ! + ! soil layers that will prevent fires to happen. ! + ! < 0. - Minimum mean soil moisture potential in MPa of the top ! + ! soil layers that will prevent fires to happen. Although ! + ! this variable can be as negative as -3.1 MPa (residual ! + ! soil water), it is recommended that SM_FIRE > -1.5 MPa ! + ! (wilting point), otherwise fires may never occur. ! + !---------------------------------------------------------------------------------------! + NL%INCLUDE_FIRE = 0 + NL%FIRE_PARAMETER = 0.5 + NL%SM_FIRE = -1.4 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! IANTH_DISTURB -- This flag controls whether to include anthropogenic disturbances ! + ! such as land clearing, abandonment, and logging. ! + ! 0. No anthropogenic disturbance. ! + ! 1. Use anthropogenic disturbance dataset (ED-2.2 default when ! + ! anthropogenic disturbance is sought). ! + ! 2. 
Site-specific forest plantation or selective logging cycle. ! + ! (Longo et al., in prep.) (Beta) ! + ! ! + ! The following variables are used only when IANTH_DISTURB is 2. ! + ! ! + ! SL_SCALE -- This flag assumes whether the simulation scale is local or ! + ! landscape. This controls the recurrence of logging. ! + ! 0. Local. The simulation represents one logging unit. Apply ! + ! logging only once every SL_NYRS ! + ! 1. Landscape. The simulation represents a landscape. Logging ! + ! occurs every year but it is restricted to patches with age ! + ! greater than or equal to SL_NYRS ! + ! SL_YR_FIRST -- The first year to apply logging. In case IANTH_DISTURB is 2 it ! + ! must be a simulation year (i.e. between IYEARA and IYEARZ). ! + ! SL_NYRS -- This variable defines the logging cycle, in years (see variable ! + ! SL_SCALE above) ! + ! SL_PFT -- PFTs that can be harvested. ! + ! SL_PROB_HARVEST -- Logging intensity (one value for each PFT provided in SL_PFT). ! + ! Values should be between 0.0 and 1.0, with 0 meaning no ! + ! removal, and 1 removal of all trees needed to meet demands. ! + ! SL_MINDBH_HARVEST -- Minimum DBH for logging (one value for each PFT provided in ! + ! SL_PFT). ! + ! SL_BIOMASS_HARVEST -- Target biomass to be harvested in each cycle, in kgC/m2. If ! + ! zero, then all trees that meet the minimum DBH and minimum ! + ! patch age will be logged. In case you don't want logging to ! + ! occur, don't set this value to zero! Instead, set IANTH_DISTURB ! + ! to zero. ! + ! ! + ! The following variables are used when IANTH_DISTURB is 1 or 2. ! + ! ! + ! SL_SKID_REL_AREA -- area damaged by skid trails (relative to felled area). ! + ! SL_SKID_S_GTHARV -- survivorship of trees with DBH > MINDBH in skid trails. ! + ! SL_SKID_S_LTHARV -- survivorship of trees with DBH < MINDBH in skid trails. ! + ! SL_FELLING_S_LTHARV -- survivorship of trees with DBH < MINDBH in felling gaps. ! + ! ! + ! Cropland variables, used when IANTH_DISTURB is 1 or 2. ! + ! ! + ! 
CL_FSEEDS_HARVEST -- fraction of seeds that is harvested. ! + ! CL_FSTORAGE_HARVEST -- fraction of non-structural carbon that is harvested. ! + ! CL_FLEAF_HARVEST -- fraction of leaves that is harvested in croplands. ! + !---------------------------------------------------------------------------------------! + NL%IANTH_DISTURB = 0 + NL%SL_SCALE = 0 + NL%SL_YR_FIRST = 1992 + NL%SL_NYRS = 50 + NL%SL_PFT = 2,3,4 + NL%SL_PROB_HARVEST = 1.0,1.0,1.0 + NL%SL_MINDBH_HARVEST = 50.,50.,50. + NL%SL_BIOMASS_HARVEST = 0 + NL%SL_SKID_REL_AREA = 1 + NL%SL_SKID_S_GTHARV = 1 + NL%SL_SKID_S_LTHARV = 0.6 + NL%SL_FELLING_S_LTHARV = 0.35 + NL%CL_FSEEDS_HARVEST = 0.75 + NL%CL_FSTORAGE_HARVEST = 0.00 + NL%CL_FLEAF_HARVEST = 0.00 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! ICANTURB -- This flag controls the canopy roughness. ! + ! ! + ! 0. (Legacy) Based on Leuning et al. (Oct 1995, Plant Cell Environ.) and LEAF-3 ! + ! (Walko et al. 2000, J. Appl. Meteorol.). Roughness and displacement height are ! + ! found using simple relations with vegetation height; wind is computed using the ! + ! similarity theory for the top cohort, then it is assumed that wind extinguishes ! + ! following an exponential decay with "perceived" cumulative LAI (local LAI with ! + ! finite crown area). ! + ! 1. (Legacy) Similar to option 0, but the wind profile is not based on LAI, instead ! + ! it uses the cohort height. ! + ! 2. (ED-2.2 default) This uses the method of Massman (1997, Boundary-Layer Meteorol.) ! + ! assuming constant drag and no sheltering factor. ! + ! 3. (ED-2.2 alternative) This is based on Massman and Weil (1999, Boundary-Layer ! + ! Meteorol.). Similar to 2, but with the option of varying the drag and sheltering ! + ! within the canopy. ! + ! 4. Similar to 0, but it finds the ground conductance following CLM-4.5 technical ! + ! 
note (Oleson et al. 2013, NCAR/TN-503+STR) (equations 5.98-5.100). ! + !---------------------------------------------------------------------------------------! + NL%ICANTURB = 2 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! ISFCLYRM -- Similarity theory model. The model that computes u*, T*, etc... ! + ! 1. (Legacy) BRAMS default, based on Louis (1979, Boundary-Layer Meteorol.). It uses ! + ! empirical relations to estimate the flux based on the bulk Richardson number. ! + ! ! + ! All models below use an iterative method to find z/L, and the only change ! + ! is the functional form of the psi functions. ! + ! ! + ! 2. (Legacy) Oncley and Dudhia (1995) model, based on MM5. ! + ! 3. (ED-2.2 default) Beljaars and Holtslag (1991) model. Similar to 2, but it uses an ! + ! alternative method for the stable case that mixes more than the OD95. ! + ! 4. (Beta) CLM-based (Oleson et al. 2013, NCAR/TN-503+STR). Similar to options 2 ! + ! and 3, but it uses special functions to deal with very stable and very unstable ! + ! cases. It also accounts for different roughness lengths between momentum and ! + ! heat. ! + !---------------------------------------------------------------------------------------! + NL%ISFCLYRM = 3 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! IED_GRNDVAP -- Methods to find the ground -> canopy conductance. ! + ! 0. (ED-2.2 default) Modified Lee and Pielke (1992, J. Appl. Meteorol.), adding ! + ! field capacity, but using beta factor without the square, like in ! + ! Noilhan and Planton (1989, Mon. Wea. Rev.). ! + ! 1. (Legacy) Test # 1 of Mahfouf and Noilhan (1991, J. Appl. Meteorol.). ! + ! 2. (Legacy) Test # 2 of Mahfouf and Noilhan (1991, J. Appl. Meteorol.). ! 
+ ! 3. (Legacy) Test # 3 of Mahfouf and Noilhan (1991, J. Appl. Meteorol.). ! + ! 4. (Legacy) Test # 4 of Mahfouf and Noilhan (1991, J. Appl. Meteorol.). ! + ! 5. (Legacy) Combination of test #1 (alpha) and test #2 (soil resistance). ! + ! In all cases the beta term is modified so it approaches zero as soil moisture goes ! + ! to dry air soil. ! + !---------------------------------------------------------------------------------------! + NL%IED_GRNDVAP = 0 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! These variables will be eventually removed from ED2IN, use XML initialisation file ! + ! to set these parameters instead. These variables are used to control the similarity ! + ! theory model. For the meaning of these parameters, check Beljaars and Holtslag ! + ! (1991, J. Appl. Meteorol.). ! + ! ! + ! GAMM -- gamma coefficient for momentum, unstable case (dimensionless) ! + ! Ignored when ISTAR = 1 ! + ! GAMH -- gamma coefficient for heat, unstable case (dimensionless) ! + ! Ignored when ISTAR = 1 ! + ! TPRANDTL -- Turbulent Prandtl number ! + ! Ignored when ISTAR = 1 ! + ! RIBMAX -- maximum bulk Richardson number. ! + ! LEAF_MAXWHC -- Maximum water that can be intercepted by leaves, in kg/m2leaf. ! + !---------------------------------------------------------------------------------------! + NL%GAMM = 13.0 + NL%GAMH = 13.0 + NL%TPRANDTL = 0.74 + NL%RIBMAX = 0.50 + NL%LEAF_MAXWHC = 0.11 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! IPERCOL -- This controls percolation and infiltration. ! + ! 0. (ED-2.2 default) Based on LEAF-3 (Walko et al. 2000, J. Appl. ! + ! Meteorol.). This assumes soil conductivity constant and for the ! + ! 
temporary surface water, it sheds liquid in excess of a 1:9 liquid- ! + ! -to-ice ratio through percolation. Temporary surface water exists ! + ! only if the top soil layer is at saturation. ! + ! 1. (Beta). Constant soil conductivity, and it uses the percolation ! + ! model as in Anderson (1976, NOAA technical report NWS 19). Temporary ! + ! surface water may exist after a heavy rain event, even if the soil ! + ! doesn't saturate. ! + ! 2. (Beta). Similar to 1, but soil conductivity decreases with depth even ! + ! for constant soil moisture. ! + !---------------------------------------------------------------------------------------! + NL%IPERCOL = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables control the plant functional types (PFTs) that will be ! + ! used in this simulation. ! + ! ! + ! INCLUDE_THESE_PFT -- which PFTs to be considered for the simulation. ! + ! PASTURE_STOCK -- which PFT should be used for pastures ! + ! (used only when IANTH_DISTURB = 1 or 2) ! + ! AGRI_STOCK -- which PFT should be used for agriculture ! + ! (used only when IANTH_DISTURB = 1 or 2) ! + ! PLANTATION_STOCK -- which PFT should be used for plantation ! + ! (used only when IANTH_DISTURB = 1 or 2) ! + ! ! + ! PFT table ! + !---------------------------------------------------------------------------------------! + ! 1 - C4 Grass ! + ! 2 - Tropical broadleaf, early successional ! + ! 3 - Tropical broadleaf, mid-successional ! + ! 4 - Tropical broadleaf, late successional ! + ! 5 - Temperate C3 grass ! + ! 6 - Northern North American pines ! + ! 7 - Southern North American pines ! + ! 8 - Late-successional North American conifers ! + ! 9 - Temperate broadleaf, early successional ! + ! 10 - Temperate broadleaf, mid-successional ! + ! 11 - Temperate broadleaf, late successional ! + ! 
12 - (Beta) Tropical broadleaf, early successional (thick bark) ! + ! 13 - (Beta) Tropical broadleaf, mid-successional (thick bark) ! + ! 14 - (Beta) Tropical broadleaf, late successional (thick bark) ! + ! 15 - Araucaria ! + ! 16 - Tropical/subtropical C3 grass ! + ! 17 - (Beta) Lianas ! + !---------------------------------------------------------------------------------------! + NL%INCLUDE_THESE_PFT = 1,2,3,4,16 + NL%PASTURE_STOCK = 1 + NL%AGRI_STOCK = 1 + NL%PLANTATION_STOCK = 3 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! PFT_1ST_CHECK -- What to do if the initialisation file has a PFT that is not listed ! + ! in INCLUDE_THESE_PFT (ignored if IED_INIT_MODE is -1 or 0) ! + ! 0. Stop the run ! + ! 1. Add the PFT in the INCLUDE_THESE_PFT list ! + ! 2. Ignore the cohort ! + !---------------------------------------------------------------------------------------! + NL%PFT_1ST_CHECK = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables control the size of sub-polygon structures in ED-2. ! + ! IFUSION -- Control on patch/cohort fusion scheme ! + ! 0. (ED-2.2 default). This is the original ED-2 scheme. This will ! + ! be eventually superseded by IFUSION=1. ! + ! 1. (Beta) New scheme, developed to address a few issues that ! + ! become more evident when initialising ED with large (>1000) ! + ! number of patches. It uses absolute difference in light levels ! + ! to avoid fusing patches with very different canopies, and also ! + ! makes sure that remaining patches have area above ! + ! MIN_PATCH_AREA and that a high percentage of the original ! + ! landscape is retained. ! + ! ! + ! MAXSITE -- This is the strict maximum number of sites that each polygon can ! 
+ ! contain. Currently this is used only when the user wants to run ! + ! the same polygon with multiple soil types. If there aren't that ! + ! many different soil types with a minimum area (check MIN_SITE_AREA ! + ! below), then the model will allocate just the amount needed. ! + ! MAXPATCH -- A variable controlling the sought number of patches per site. ! + ! Possible values are: ! + ! 0. Disable any patch fusion. This may lead to a large number ! + ! of patches in century-long simulations. ! + ! 1. The model will force fusion until the total number of ! + ! patches is 1 for each land use type. ! + ! -1. Similar to 1, but fusion will only happen during ! + ! initialisation ! + ! >= 2. The model will seek fusion of patches every year, aiming to ! + ! keep the number of patches below NL%MAXPATCH. ! + ! <= -2. Similar to >= 2, but fusion will only happen during ! + ! initialisation. The target number of patches will be the ! + ! absolute number of NL%MAXPATCH. ! + ! ! + ! IMPORTANT: A given site may contain more patches than MAXPATCH in ! + ! case the patches are so different that they cannot be ! + ! fused even when the fusion threshold is relaxed. ! + ! ! + ! MAXCOHORT -- A variable controlling the sought number of cohorts per patch. ! + ! Possible values are: ! + ! 0. Disable cohort fusion. This may lead to a large number of ! + ! cohorts in century-long simulations. ! + ! >= 1. The model will seek fusion of cohorts every month, aiming to ! + ! keep the number of cohorts per patch below MAXCOHORT. ! + ! <= -1. Similar to >= 1, but fusion will only happen during ! + ! initialisation. The target number of cohorts will be the ! + ! absolute number of MAXCOHORT. ! + ! ! + ! IMPORTANT: A given patch may contain more cohorts than MAXCOHORT in ! + ! case the cohorts are so different that they cannot be ! + ! fused even when the fusion threshold is relaxed. ! + ! ! + ! MIN_SITE_AREA -- This is the minimum fraction area of a given soil type that allows ! + ! 
a site to be created. ! + ! ! + ! MIN_PATCH_AREA -- This is the minimum fraction area of a given soil type that allows ! + ! a site to be created (ignored if IED_INIT_MODE is set to 3). ! + ! IMPORTANT: This is not enforced by the model, but we recommend that ! + ! MIN_PATCH_AREA >= 1/MAXPATCH, otherwise the model may ! + ! never reach MAXPATCH. ! + !---------------------------------------------------------------------------------------! + NL%IFUSION = 0 + NL%MAXSITE = 1 + NL%MAXPATCH = 30 + NL%MAXCOHORT = 80 + NL%MIN_SITE_AREA = 0.001 + NL%MIN_PATCH_AREA = 0.001 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! ZROUGH -- Roughness length [metres] of non-vegetated soil. This variable will be ! + ! eventually removed from ED2IN, use XML initialisation file to set this ! + ! parameter instead. ! + !---------------------------------------------------------------------------------------! + NL%ZROUGH = 0.1 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! Treefall disturbance parameters. ! + ! TREEFALL_DISTURBANCE_RATE -- Sign-dependent treefall disturbance rate: ! + ! > 0. usual disturbance rate, in 1/years; ! + ! = 0. No treefall disturbance; ! + ! TIME2CANOPY -- Minimum patch age for treefall disturbance to happen. ! + ! If TREEFALL_DISTURBANCE_RATE = 0., this value will be ! + ! ignored. If this value is different than zero, then ! + ! TREEFALL_DISTURBANCE_RATE is internally adjusted so the ! + ! average patch age is still 1/TREEFALL_DISTURBANCE_RATE ! + !---------------------------------------------------------------------------------------! 
+ NL%TREEFALL_DISTURBANCE_RATE = 0.0125 + NL%TIME2CANOPY = 0 + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! RUNOFF_TIME -- In case a temporary surface water (TSW) is created, this is the "e- ! + ! -folding lifetime" of the TSW in seconds due to runoff. If you don't ! + ! want runoff to happen, set this to 0. ! + !---------------------------------------------------------------------------------------! + NL%RUNOFF_TIME = 3600. + !---------------------------------------------------------------------------------------! + + + + + !---------------------------------------------------------------------------------------! + ! These variables will be eventually removed from ED2IN, use XML initialisation ! + ! file to set these parameters instead. ! + ! ! + ! The following variables control the minimum values of various velocities in the ! + ! canopy. This is needed to avoid the air to be extremely still, or to avoid singular- ! + ! ities. When defining the values, keep in mind that UBMIN >= UGBMIN >= USTMIN. ! + ! ! + ! UBMIN -- minimum wind speed at the top of the canopy air space [ m/s] ! + ! UGBMIN -- minimum wind speed at the leaf level [ m/s] ! + ! USTMIN -- minimum friction velocity, u*, in m/s. [ m/s] ! + !---------------------------------------------------------------------------------------! + NL%UBMIN = 1.00 + NL%UGBMIN = 0.25 + NL%USTMIN = 0.10 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! Control parameters for printing to standard output. Any variable can be printed ! + ! to standard output as long as it is one dimensional. Polygon variables have been ! + ! tested, no gaurtantees for other hierarchical levels. Choose any variables that are ! + ! 
defined in the variable table fill routine in ed_state_vars.f90. Choose the start ! + ! and end index of the polygon,site,patch or cohort. It should work in parallel. The ! + ! indices are global indices of the entire domain. The are printed out in rows of 10 ! + ! columns each. ! + ! ! + ! IPRINTPOLYS -- 0. Do not print information to screen ! + ! 1. Print polygon arrays to screen, use variables described below to ! + ! determine which ones and how ! + ! NPVARS -- Number of variables to be printed ! + ! PRINTVARS -- List of variables to be printed ! + ! PFMTSTR -- The standard fortran format for the prints. One format per variable ! + ! IPMIN -- First polygon (absolute index) to be print ! + ! IPMAX -- Last polygon (absolute index) to print ! + !---------------------------------------------------------------------------------------! + NL%IPRINTPOLYS = 0 + NL%NPVARS = 1 + NL%PRINTVARS = 'AVG_PCPG','AVG_CAN_TEMP','AVG_VAPOR_AC','AVG_CAN_SHV' + NL%PFMTSTR = 'f10.8','f5.1','f7.2','f9.5' + NL%IPMIN = 1 + NL%IPMAX = 60 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! Variables that control the meteorological forcing. ! + ! ! + ! IMETTYPE -- Format of the meteorological dataset ! + ! 0. (Non-functional) ASCII ! + ! 1. (ED-2.2 default) HDF5 ! + ! ISHUFFLE -- How to choose an year outside the meterorological data range (see ! + ! METCYC1 and METCYCF). ! + ! 0. (ED-2.2 default) Sequentially cycle over years ! + ! 1. (Under development) Randomly pick a year. The sequence of randomly ! + ! picked years will be the same every time the simulation is re-run, ! + ! provided that the initial year and met driver time span remain the ! + ! same. This have been reports that this option is working like ! + ! option 2 (completely random). ! + ! 2. (Beta) Randomly pick the years, choosing a different sequence each ! + ! time the model is run. ! 
+ ! ! + ! IMPORTANT: Regardless of the ISHUFFLE option, the model always use the ! + ! correct year for the period in which meteorological drivers ! + ! exist. ! + ! ! + ! IMETCYC1 -- First year for which meteorological driver files exist. ! + ! IMETCYCF -- Last year for which meteorological driver files exist. In addition, ! + ! the model assumes that files exist for all years between METCYC1 and ! + ! METCYCF. ! + ! IMETAVG -- How the input radiation was originally averaged. You must tell this ! + ! because ED-2.1 can make a interpolation accounting for the cosine of ! + ! zenith angle. ! + ! -1. (Deprecated) I don't know, use linear interpolation. ! + ! 0. No average, the values are instantaneous ! + ! 1. Averages ending at the reference time ! + ! 2. Averages beginning at the reference time ! + ! 3. Averages centred at the reference time ! + ! ! + ! IMPORTANT: The user must obtain the correct information for each ! + ! meteorological driver before running the model, and set ! + ! this variable consistently. Inconsistent settings are ! + ! known to cause numerical instabilities, particularly at ! + ! around sunrise and sunset times. ! + ! ! + ! IMETRAD -- What should the model do with the input short wave radiation? ! + ! 0. (ED-2.2 default, when radiation components were measured) ! + ! Nothing, use it as is. ! + ! 1. (Legacy) Add radiation components together, then use the SiB ! + ! method (Sellers et al. 1986, J. Atmos. Sci) to split radiation ! + ! into the four components (PAR direct, PAR diffuse, NIR direct, ! + ! NIR diffuse). ! + ! 2. (ED-2.2 default when radiation components were not measured) ! + ! Add components then together, then use the method by Weiss and ! + ! Norman (1985, Agric. For. Meteorol.) to split radiation down to ! + ! the four components. ! + ! 3. All radiation goes to diffuse. Useful for theoretical studies ! + ! only. ! + ! 4. All radiation goes to direct, except at night. Useful for ! + ! theoretical studies only. ! + ! 5. 
(Beta) Add radiation components back together, then split ! + ! radiation to the four components based on clearness index (Bendix ! + ! et al. 2010, Int. J. Biometeorol.). ! + ! INITIAL_CO2 -- Initial value for CO2 in case no CO2 is provided at the meteorological ! + ! driver dataset [Units: umol/mol] ! + !---------------------------------------------------------------------------------------! + NL%IMETTYPE = 1 + NL%ISHUFFLE = 0 + NL%METCYC1 = @MET_START@ + NL%METCYCF = @MET_END@ + NL%IMETAVG = @MET_SOURCE@ + NL%IMETRAD = 5 + NL%INITIAL_CO2 = 410. + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables control the phenology prescribed from observations: ! + ! ! + ! IPHENYS1 -- First year for spring phenology ! + ! IPHENYSF -- Final year for spring phenology ! + ! IPHENYF1 -- First year for fall/autumn phenology ! + ! IPHENYFF -- Final year for fall/autumn phenology ! + ! PHENPATH -- path and prefix of the prescribed phenology data. ! + ! ! + ! If the years don't cover the entire simulation period, they will be recycled. ! + !---------------------------------------------------------------------------------------! + NL%IPHENYS1 = @PHENOL_START@ + NL%IPHENYSF = @PHENOL_END@ + NL%IPHENYF1 = @PHENOL_START@ + NL%IPHENYFF = @PHENOL_END@ + NL%PHENPATH = '@PHENOL@' + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! These are some additional configuration files. ! + ! IEDCNFGF -- XML file containing additional parameter settings. If you don't have ! + ! one, leave it empty ! + ! EVENT_FILE -- file containing specific events that must be incorporated into the ! + ! simulation. ! + ! PHENPATH -- path and prefix of the prescribed phenology data. ! 
+ !---------------------------------------------------------------------------------------! + NL%IEDCNFGF = '@CONFIGFILE@' + NL%EVENT_FILE = '/mypath/event.xml' + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! Census variables. This is going to create unique census statuses to cohorts, to ! + ! better compare the model with census observations. In case you don't intend to ! + ! compare the model with census data, set up DT_CENSUS to 1., otherwise you may reduce ! + ! cohort fusion. ! + ! DT_CENSUS -- Time between census, in months. Currently the maximum is 60 ! + ! months, to avoid excessive memory allocation. Every time the ! + ! simulation reaches the census time step, all census tags will be ! + ! reset. ! + ! YR1ST_CENSUS -- In which year was the first census conducted? ! + ! MON1ST_CENSUS -- In which month was the first census conducted? ! + ! MIN_RECRUIT_DBH -- Minimum DBH that is measured in the census, in cm. ! + !---------------------------------------------------------------------------------------! + NL%DT_CENSUS = 24 + NL%YR1ST_CENSUS = 2004 + NL%MON1ST_CENSUS = 3 + NL%MIN_RECRUIT_DBH = 10 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! The following variables are used to control the detailed output for debugging ! + ! purposes. ! + ! ! + ! IDETAILED -- This flag controls the possible detailed outputs, mostly used for ! + ! debugging purposes. Notice that this doesn't replace the normal debug- ! + ! ger options, the idea is to provide detailed output to check bad ! + ! assumptions. The options are additive, and the indices below represent ! + ! the different types of output: ! + ! ! + ! 0 -- (ED-2.2 default) No detailed output. ! + ! 
1 -- Detailed budget (every DTLSM) ! + ! 2 -- Detailed photosynthesis (every DTLSM) ! + ! 4 -- Detailed output from the integrator (every HDID) ! + ! 8 -- Thermodynamic bounds for sanity check (every DTLSM) ! + ! 16 -- Daily error stats (which variable caused the time step to shrink) ! + ! 32 -- Allometry parameters, photosynthesis parameters, and minimum and ! + ! maximum sizes (three files, only at the beginning) ! + ! 64 -- Detailed disturbance rate output. Two types of detailed ! + ! transitions will be written (single polygon runs only). ! + ! a. A text file that looks like the .lu files. This is written ! + ! only once, at the beginning of the simulation. ! + ! b. Detailed information about the transition matrix. This is ! + ! written to the standard output (e.g. screen), every time the ! + ! patch dynamics is called. ! + ! ! + ! In case you don't want any detailed output (likely for most runs), set ! + ! IDETAILED to zero. In case you want to generate multiple outputs, add ! + ! the number of the sought options: for example, if you want detailed ! + ! photosynthesis and detailed output from the integrator, set IDETAILED ! + ! to 6 (2 + 4). Any combination of the above outputs is acceptable, al- ! + ! though all but the last produce a sheer amount of txt files, in which ! + ! case you may want to look at variable PATCH_KEEP. ! + ! ! + ! IMPORTANT: The first five options will only work for single site ! + ! simulations, and it is strongly recommended to set ! + ! IVEGT_DYNAMICS to 0. These options generate tons of ! + ! output, so don't try these options with long simulations. ! + ! ! + ! ! + ! PATCH_KEEP -- This option will eliminate all patches except one from the initial- ! + ! isation. This is only used when one of the first five types of ! + ! detailed output is active, otherwise it will be ignored. Options are: ! + ! -2. Keep only the patch with the lowest potential LAI ! + ! -1. Keep only the patch with the highest potential LAI ! + ! 0. 
Keep all patches. ! + ! > 0. Keep the patch with the provided index. In case the index is ! + ! not valid, the model will crash. ! + !---------------------------------------------------------------------------------------! + NL%IDETAILED = 0 + NL%PATCH_KEEP = 0 + !---------------------------------------------------------------------------------------! + + + + !---------------------------------------------------------------------------------------! + ! GROWTH_RESP_SCHEME -- This flag indicates how growth respiration fluxes are treated. ! + ! ! + ! 0 - (Legacy) Growth respiration is treated as tax on GPP, at pft-specific ! + ! rate given by growth_resp_factor. All growth respiration is treated as ! + ! aboveground wood -> canopy-airspace flux. ! + ! 1 - (ED-2.2 default) Growth respiration is calculated as in 0, but split into ! + ! fluxes entering the CAS from Leaf, Fine Root, Sapwood (above- and below- ! + ! -ground), and Bark (above- and below-ground, only when IALLOM=3), ! + ! proportionally to the biomass of each tissue. This does not affect carbon ! + ! budget at all, it provides greater within-ecosystem flux resolution. ! + !---------------------------------------------------------------------------------------! + NL%GROWTH_RESP_SCHEME = 1 + !---------------------------------------------------------------------------------------! + + + !---------------------------------------------------------------------------------------! + ! STORAGE_RESP_SCHEME -- This flag controls how storage respiration fluxes are treated. ! + ! ! + ! 0 - (Legacy) Storage resp. is aboveground wood -> canopy-airspace flux. ! + ! 1 - (ED-2.2 default) Storage respiration is calculated as in 0, but split into ! + ! fluxes entering the CAS from Leaf, Fine Root, Sapwood (above- and below- ! + ! -ground), and Bark (above- and below-ground, only when IALLOM=3), ! + ! proportionally to the biomass of each tissue. This does not affect carbon ! + ! 
budget at all, it provides greater within-ecosystem flux resolution. ! + !---------------------------------------------------------------------------------------! + NL%STORAGE_RESP_SCHEME = 1 + !---------------------------------------------------------------------------------------! +$END +!==========================================================================================! +!==========================================================================================! diff --git a/modules/assim.batch/DESCRIPTION b/modules/assim.batch/DESCRIPTION index 122e2da0e83..faddbebc3de 100644 --- a/modules/assim.batch/DESCRIPTION +++ b/modules/assim.batch/DESCRIPTION @@ -42,7 +42,7 @@ Imports: stats, prodlim, MCMCpack, - tmvtnorm, + TruncatedNormal (>= 2.2), udunits2 (>= 0.11), utils, XML, diff --git a/modules/assim.batch/R/hier.mcmc.R b/modules/assim.batch/R/hier.mcmc.R index de8b668c512..6bf159169b4 100644 --- a/modules/assim.batch/R/hier.mcmc.R +++ b/modules/assim.batch/R/hier.mcmc.R @@ -208,11 +208,11 @@ hier.mcmc <- function(settings, gp.stack, nstack = NULL, nmcmc, rng_orig, # propose new site parameter vectors thissite <- g %% nsites if(thissite == 0) thissite <- nsites - proposed <- tmvtnorm::rtmvnorm(1, - mean = mu_site_curr[thissite,], + proposed <- TruncatedNormal::rtmvnorm(1, + mu = mu_site_curr[thissite,], sigma = jcov.arr[,,thissite], - lower = rng_orig[,1], - upper = rng_orig[,2]) + lb = rng_orig[,1], + ub = rng_orig[,2]) mu_site_new <- matrix(rep(proposed, nsites),ncol=nparam, byrow = TRUE) @@ -228,9 +228,9 @@ hier.mcmc <- function(settings, gp.stack, nstack = NULL, nmcmc, rng_orig, # calculate jump probabilities currHR <- sapply(seq_len(nsites), function(v) { - tmvtnorm::dtmvnorm(mu_site_curr[v,], mu_site_new[v,], jcov.arr[,,v], - lower = rng_orig[,1], - upper = rng_orig[,2], log = TRUE) + TruncatedNormal::dtmvnorm(mu_site_curr[v,], mu_site_new[v,], jcov.arr[,,v], + lb = rng_orig[,1], + ub = rng_orig[,2], log = TRUE, B = 1e2) }) # predict new SS @@ 
-246,9 +246,9 @@ hier.mcmc <- function(settings, gp.stack, nstack = NULL, nmcmc, rng_orig, # calculate jump probabilities newHR <- sapply(seq_len(nsites), function(v) { - tmvtnorm::dtmvnorm(mu_site_new[v,], mu_site_curr[v,], jcov.arr[,,v], - lower = rng_orig[,1], - upper = rng_orig[,2], log = TRUE) + TruncatedNormal::dtmvnorm(mu_site_new[v,], mu_site_curr[v,], jcov.arr[,,v], + lb = rng_orig[,1], + ub = rng_orig[,2], log = TRUE, B = 1e2) }) # Accept/reject with MH rule diff --git a/modules/assim.batch/R/pda.utils.R b/modules/assim.batch/R/pda.utils.R index 2180709a3c2..0763fa465e7 100644 --- a/modules/assim.batch/R/pda.utils.R +++ b/modules/assim.batch/R/pda.utils.R @@ -901,11 +901,11 @@ generate_hierpost <- function(mcmc.out, prior.fn.all, prior.ind.all){ # calculate hierarchical posteriors from mu_global_samp and tau_global_samp hierarchical_samp <- mu_global_samp for(si in seq_len(iter_size)){ - hierarchical_samp[si,] <- tmvtnorm::rtmvnorm(1, - mean = mu_global_samp[si,], + hierarchical_samp[si,] <- TruncatedNormal::rtmvnorm(1, + mu = mu_global_samp[si,], sigma = sigma_global_samp[si,,], - lower = lower_lim, - upper = upper_lim) + lb = lower_lim, + ub = upper_lim) } mcmc.out[[i]]$hierarchical_samp <- hierarchical_samp diff --git a/modules/benchmark/inst/scripts/benchmark.workflow.FATES_BCI.R b/modules/benchmark/inst/scripts/benchmark.workflow.FATES_BCI.R index 2c2d47e08dc..7697cd65205 100644 --- a/modules/benchmark/inst/scripts/benchmark.workflow.FATES_BCI.R +++ b/modules/benchmark/inst/scripts/benchmark.workflow.FATES_BCI.R @@ -28,7 +28,7 @@ settings <- PEcAn.settings::read.settings(settings.file) input_id <- 1000011171 ## 4) Edit Input to associate File ## 5) Verify that PEcAn is able to find and load file -input <- PEcAn.DB::query.file.path(input_id,host_name = "localhost",con = bety$con) +input <- PEcAn.DB::query.file.path(input_id,host_name = "localhost",con = bety) format <- PEcAn.DB::query.format.vars(bety,input_id) field <- 
PEcAn.benchmark::load_data(input,format) ## 6) Look up variable_id in database diff --git a/modules/data.atmosphere/R/met.process.R b/modules/data.atmosphere/R/met.process.R index e1d8e49c52b..33328c9a306 100644 --- a/modules/data.atmosphere/R/met.process.R +++ b/modules/data.atmosphere/R/met.process.R @@ -87,12 +87,8 @@ met.process <- function(site, input_met, start_date, end_date, model, } # set up connection and host information - bety <- dplyr::src_postgres(dbname = dbparms$dbname, - host = dbparms$host, - user = dbparms$user, - password = dbparms$password) - - con <- bety$con + con <- PEcAn.DB::db.open(dbparms) + on.exit(PEcAn.DB::db.close(con), add = TRUE) username <- ifelse(is.null(input_met$username), "pecan", input_met$username) machine.host <- ifelse(host == "localhost" || host$name == "localhost", PEcAn.remote::fqdn(), host$name) @@ -128,10 +124,10 @@ met.process <- function(site, input_met, start_date, end_date, model, # first attempt at function that designates where to start met.process if (is.null(input_met$id)) { stage <- list(download.raw = TRUE, met2cf = TRUE, standardize = TRUE, met2model = TRUE) - format.vars <- PEcAn.DB::query.format.vars(bety = bety, format.id = register$format$id) # query variable info from format id + format.vars <- PEcAn.DB::query.format.vars(bety = con, format.id = register$format$id) # query variable info from format id } else { stage <- met.process.stage(input.id=input_met$id, raw.id=register$format$id, con) - format.vars <- PEcAn.DB::query.format.vars(bety = bety, input.id = input_met$id) # query DB to get format variable information if available + format.vars <- PEcAn.DB::query.format.vars(bety = con, input.id = input_met$id) # query DB to get format variable information if available # Is there a situation in which the input ID could be given but not the file path? 
# I'm assuming not right now assign(stage$id.name, @@ -280,7 +276,7 @@ met.process <- function(site, input_met, start_date, end_date, model, con = con, host = host, overwrite = overwrite$met2cf, format.vars = format.vars, - bety = bety) + bety = con) } else { if (! met %in% c("ERA5")) cf.id = input_met$id } @@ -411,11 +407,11 @@ met.process <- function(site, input_met, start_date, end_date, model, ################################################################################################################################# -##' @name db.site.lat.lon -##' @title db.site.lat.lon +##' Look up lat/lon from siteid +##' ##' @export -##' @param site.id -##' @param con +##' @param site.id BeTY ID of site to look up +##' @param con database connection ##' @author Betsy Cowdery db.site.lat.lon <- function(site.id, con) { site <- PEcAn.DB::db.query(paste("SELECT id, ST_X(ST_CENTROID(geometry)) AS lon, ST_Y(ST_CENTROID(geometry)) AS lat FROM sites WHERE id =", @@ -434,20 +430,19 @@ db.site.lat.lon <- function(site.id, con) { ################################################################################################################################# -##' @name browndog.met -##' @description Use browndog to get the met data for a specific model -##' @title get met data from browndog +##' Use browndog to get the met data for a specific model +##' ##' @export -##' @param browndog, list with url, username and password to connect to browndog -##' @param source, the source of the met data, currently only NARR an Ameriflux is supported -##' @param site, site information should have id, lat, lon and name (ameriflux id) -##' @param start_date, start date for result -##' @param end_date, end date for result -##' @param model, model to convert the met data to -##' @param dir, folder where results are stored (in subfolder) -##' @param username, used when downloading data from Ameriflux like sites -##' @param con, database connection -## +##' @param browndog list with url, username 
and password to connect to browndog +##' @param source the source of the met data, currently only NARR an Ameriflux is supported +##' @param site site information should have id, lat, lon and name (ameriflux id) +##' @param start_date start date for result +##' @param end_date end date for result +##' @param model model to convert the met data to +##' @param dir folder where results are stored (in subfolder) +##' @param username used when downloading data from Ameriflux like sites +##' @param con database connection +##' ##' @author Rob Kooper browndog.met <- function(browndog, source, site, start_date, end_date, model, dir, username, con) { folder <- tempfile("BD-", dir) diff --git a/modules/data.atmosphere/R/met.process.stage.R b/modules/data.atmosphere/R/met.process.stage.R index 41b2c756166..e0ca23964a8 100644 --- a/modules/data.atmosphere/R/met.process.stage.R +++ b/modules/data.atmosphere/R/met.process.stage.R @@ -4,6 +4,7 @@ ##' ##' @param input.id ##' @param raw.id +##' @param con database connection ##' ##' @author Elizabeth Cowdery met.process.stage <- function(input.id, raw.id, con) { diff --git a/modules/data.atmosphere/man/browndog.met.Rd b/modules/data.atmosphere/man/browndog.met.Rd index 18b8bdb63f9..16f5b224972 100644 --- a/modules/data.atmosphere/man/browndog.met.Rd +++ b/modules/data.atmosphere/man/browndog.met.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/met.process.R \name{browndog.met} \alias{browndog.met} -\title{get met data from browndog} +\title{Use browndog to get the met data for a specific model} \usage{ browndog.met( browndog, @@ -17,23 +17,23 @@ browndog.met( ) } \arguments{ -\item{browndog, }{list with url, username and password to connect to browndog} +\item{browndog}{list with url, username and password to connect to browndog} -\item{source, }{the source of the met data, currently only NARR an Ameriflux is supported} +\item{source}{the source of the met data, currently only NARR an Ameriflux is supported} -\item{site, }{site 
information should have id, lat, lon and name (ameriflux id)} +\item{site}{site information should have id, lat, lon and name (ameriflux id)} -\item{start_date, }{start date for result} +\item{start_date}{start date for result} -\item{end_date, }{end date for result} +\item{end_date}{end date for result} -\item{model, }{model to convert the met data to} +\item{model}{model to convert the met data to} -\item{dir, }{folder where results are stored (in subfolder)} +\item{dir}{folder where results are stored (in subfolder)} -\item{username, }{used when downloading data from Ameriflux like sites} +\item{username}{used when downloading data from Ameriflux like sites} -\item{con, }{database connection} +\item{con}{database connection} } \description{ Use browndog to get the met data for a specific model diff --git a/modules/data.atmosphere/man/db.site.lat.lon.Rd b/modules/data.atmosphere/man/db.site.lat.lon.Rd index 46aa66c45fd..9a2cfed78f9 100644 --- a/modules/data.atmosphere/man/db.site.lat.lon.Rd +++ b/modules/data.atmosphere/man/db.site.lat.lon.Rd @@ -2,12 +2,17 @@ % Please edit documentation in R/met.process.R \name{db.site.lat.lon} \alias{db.site.lat.lon} -\title{db.site.lat.lon} +\title{Look up lat/lon from siteid} \usage{ db.site.lat.lon(site.id, con) } +\arguments{ +\item{site.id}{BeTY ID of site to look up} + +\item{con}{database connection} +} \description{ -db.site.lat.lon +Look up lat/lon from siteid } \author{ Betsy Cowdery diff --git a/modules/data.atmosphere/man/met.process.stage.Rd b/modules/data.atmosphere/man/met.process.stage.Rd index fe3eaa0122a..cf056773ff7 100644 --- a/modules/data.atmosphere/man/met.process.stage.Rd +++ b/modules/data.atmosphere/man/met.process.stage.Rd @@ -7,7 +7,7 @@ met.process.stage(input.id, raw.id, con) } \arguments{ -\item{raw.id}{} +\item{con}{database connection} } \description{ met.process.stage diff --git a/modules/data.atmosphere/tests/Rcheck_reference.log b/modules/data.atmosphere/tests/Rcheck_reference.log index 
bf1308c9e6f..b0445673eaf 100644 --- a/modules/data.atmosphere/tests/Rcheck_reference.log +++ b/modules/data.atmosphere/tests/Rcheck_reference.log @@ -289,9 +289,6 @@ Undocumented arguments in documentation object 'closest_xy' Undocumented arguments in documentation object 'daygroup' ‘date’ ‘flx’ -Undocumented arguments in documentation object 'db.site.lat.lon' - ‘site.id’ ‘con’ - Undocumented arguments in documentation object 'debias_met' ‘outfolder’ ‘site_id’ ‘...’ @@ -364,7 +361,7 @@ Undocumented arguments in documentation object 'met.process' ‘browndog’ Undocumented arguments in documentation object 'met.process.stage' - ‘input.id’ ‘con’ + ‘input.id’ ‘raw.id’ Undocumented arguments in documentation object 'met2CF.ALMA' ‘verbose’ diff --git a/modules/data.remote/R/call_MODIS.R b/modules/data.remote/R/call_MODIS.R index 71ecd18e0a6..2cb138472a1 100755 --- a/modules/data.remote/R/call_MODIS.R +++ b/modules/data.remote/R/call_MODIS.R @@ -3,7 +3,7 @@ ##' @name call_MODIS ##' @title call_MODIS ##' @export -##' @param outdir where the output file will be stored. Default is NULL +##' @param outdir where the output file will be stored. Default is NULL and in this case only values are returned. When path is provided values are returned and written to disk. ##' @param var the simple name of the modis dataset variable (e.g. 
lai) ##' @param site_info Bety list of site info for parsing MODIS data: list(site_id, site_name, lat, ##' lon, time_zone) @@ -35,14 +35,14 @@ ##' lon = 90, ##' time_zone = "UTC") ##' test_modistools <- call_MODIS( -##' outdir = NULL, ##' var = "lai", +##' product = "MOD15A2H", +##' band = "Lai_500m", ##' site_info = site_info, ##' product_dates = c("2001150", "2001365"), +##' outdir = NULL, ##' run_parallel = TRUE, ##' ncores = NULL, -##' product = "MOD15A2H", -##' band = "Lai_500m", ##' package_method = "MODISTools", ##' QC_filter = TRUE, ##' progress = FALSE) @@ -50,12 +50,12 @@ ##' @importFrom foreach %do% %dopar% ##' @author Bailey Morrison ##' -call_MODIS <- function(outdir = NULL, - var, site_info, - product_dates, +call_MODIS <- function(var, product, + band, site_info, + product_dates, + outdir = NULL, run_parallel = FALSE, - ncores = NULL, - product, band, + ncores = NULL, package_method = "MODISTools", QC_filter = FALSE, progress = FALSE) { @@ -244,7 +244,7 @@ call_MODIS <- function(outdir = NULL, output$qc[i] <- substr(convert, nchar(convert) - 2, nchar(convert)) } good <- which(output$qc %in% c("000", "001")) - if (length(good) > 0 || !(is.null(good))) + if (length(good) > 0) { output <- output[good, ] } else { diff --git a/modules/data.remote/inst/bands2lai_snap.py b/modules/data.remote/inst/bands2lai_snap.py new file mode 100644 index 00000000000..86707027432 --- /dev/null +++ b/modules/data.remote/inst/bands2lai_snap.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Calculates LAI using SNAP. + +Author: Ayush Prasad +""" + +from satellitetools import gee +import satellitetools.biophys_xarray as bio +import geopandas as gpd +import xarray as xr +import os + + +def bands2lai_snap(inputfile, outdir): + """ + Calculates LAI for the input netCDF file and saves it in a new netCDF file. + + Parameters + ---------- + input (str) -- path to the input netCDF file containing bands. 
+ + outdir (str) -- path to the directory where the output file is stored. If specified directory does not exists, it is created. + + Returns + ------- + Nothing: + output netCDF is saved in the specified directory. + + """ + # load the input file + ds_disk = xr.open_dataset(inputfile) + # calculate LAI using SNAP + area = bio.run_snap_biophys(ds_disk, "LAI") + + timeseries = {} + timeseries_variable = ["lai"] + + # if specified output directory does not exist, create it. + if not os.path.exists(outdir): + os.makedirs(outdir, exist_ok=True) + + # creating a timerseries and saving the netCDF file + area.to_netcdf(os.path.join(outdir, area.name + "_lai.nc")) + timeseries[area.name] = gee.xr_dataset_to_timeseries(area, timeseries_variable) \ No newline at end of file diff --git a/modules/data.remote/inst/bands2ndvi.py b/modules/data.remote/inst/bands2ndvi.py new file mode 100644 index 00000000000..216c3b1e77c --- /dev/null +++ b/modules/data.remote/inst/bands2ndvi.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Calculates NDVI using gee. + +Author: Ayush Prasad +""" + +import xarray as xr +from satellitetools import gee +import geopandas as gpd +import os + + +def bands2ndvi(inputfile, outdir): + """ + Calculates NDVI for the input netCDF file and saves it in a new netCDF file. + + Parameters + ---------- + input (str) -- path to the input netCDF file containing bands. + + outdir (str) -- path to the directory where the output file is stored. If specified directory does not exists, it is created. + + Returns + ------- + Nothing: + output netCDF is saved in the specified directory. + + """ + # load the input file + ds_disk = xr.open_dataset(inputfile) + # calculate NDVI using gee + area = gee.compute_ndvi(ds_disk) + + timeseries = {} + timeseries_variable = ["ndvi"] + + # if specified output directory does not exist, create it. 
+ if not os.path.exists(outdir): + os.makedirs(outdir, exist_ok=True) + + # creating a timerseries and saving the netCDF file + area.to_netcdf(os.path.join(outdir, area.name + "_ndvi.nc")) + timeseries[area.name] = gee.xr_dataset_to_timeseries(area, timeseries_variable) diff --git a/modules/data.remote/inst/gee2pecan_bands.py b/modules/data.remote/inst/gee2pecan_bands.py new file mode 100644 index 00000000000..3c325784962 --- /dev/null +++ b/modules/data.remote/inst/gee2pecan_bands.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +Downloads ESA Sentinel 2, Level-2A Bottom of Atmosphere data and saves it in a netCDF file. +Bands retrieved: B3, B4, B5, B6, B7, B8A, B11 and B12 +More information about the bands and the process followed to get the data can be found out at /satellitetools/geeapi.py + +Warning: Point coordinates as input has currently not been implemented. + +Requires Python3 + +Uses satellitetools created by Olli Nevalainen. + +Author: Ayush Prasad +""" + +from satellitetools import gee +import geopandas as gpd +import os + + +def gee2pecan_bands(geofile, outdir, start, end, qi_threshold): + """ + Downloads Sentinel 2 data from gee and saves it in a netCDF file at the specified location. + + Parameters + ---------- + geofile (str) -- path to the file containing the name and coordinates of ROI, currently tested with geojson. + + outdir (str) -- path to the directory where the output file is stored. If specified directory does not exists, it is created. + + start (str) -- starting date of the data request in the form YYYY-MM-DD + + end (str) -- ending date of the data request in the form YYYY-MM-DD + + qi_threshold (float) -- From satellitetools: Threshold value to filter images based on used qi filter. qi filter holds labels of classes whose percentages within the AOI is summed. If the sum is larger then the qi_threshold, data will not be retrieved for that date/image. 
The default is 1, meaning all data is retrieved + + Returns + ------- + Nothing: + output netCDF is saved in the specified directory. + + Python dependencies required: earthengine-api, geopandas, pandas, netCDF4, xarray + To test this function please run the following code inside a python shell after importing this module, testfile is included. + + gee2pecan_bands(geofile="./satellitetools/test.geojson", outdir="./out/", start="2019-01-01", end="2019-12-31", qi_threshold=1) + """ + + # read in the input file containing coordinates + df = gpd.read_file(geofile) + + request = gee.S2RequestParams(start, end) + + # filter area of interest from the coordinates in the input file + area = gee.AOI(df[df.columns[0]].iloc[0], df[df.columns[1]].iloc[0]) + + # calcuate qi attribute for the AOI + gee.ee_get_s2_quality_info(area, request) + + # get the final data + gee.ee_get_s2_data(area, request, qi_threshold=qi_threshold) + + # convert dataframe to an xarray dataset, used later for converting to netCDF + gee.s2_data_to_xarray(area, request) + + # if specified output directory does not exist, create it + if not os.path.exists(outdir): + os.makedirs(outdir, exist_ok=True) + + # create a timerseries and save the netCDF file + area.data.to_netcdf(os.path.join(outdir, area.name + "_bands.nc")) diff --git a/modules/data.remote/inst/gee2pecan_smap.py b/modules/data.remote/inst/gee2pecan_smap.py new file mode 100644 index 00000000000..5ea7b62b455 --- /dev/null +++ b/modules/data.remote/inst/gee2pecan_smap.py @@ -0,0 +1,152 @@ +""" +Downloads SMAP Global Soil Moisture Data from Google Earth Engine and saves it in a netCDF file. 
+ +Requires Python3 + +Author: Ayush Prasad +""" + +import ee +import pandas as pd +import geopandas as gpd +import os +import xarray as xr +import datetime + +ee.Initialize() + + +def gee2pecan_smap(geofile, outdir, start, end, var): + """ + Downloads and saves SMAP data from GEE + + Parameters + ---------- + geofile (str) -- path to the geosjon file containing the name and coordinates of ROI + + outdir (str) -- path to the directory where the output file is stored. If specified directory does not exists, it is created. + + start (str) -- starting date of the data request in the form YYYY-MM-dd + + end (str) -- ending date areaof the data request in the form YYYY-MM-dd + + var (str) -- one of ssm, susm, smp, ssma, susma + + Returns + ------- + Nothing: + output netCDF is saved in the specified directory + """ + + # read in the geojson file + df = gpd.read_file(geofile) + + if (df.geometry.type == "Point").bool(): + # extract coordinates + lon = float(df.geometry.x) + lat = float(df.geometry.y) + # create geometry + geo = ee.Geometry.Point(lon, lat) + + elif (df.geometry.type == "Polygon").bool(): + # extract coordinates + area = [ + list(df.geometry.exterior[row_id].coords) for row_id in range(df.shape[0]) + ] + # create geometry + geo = ee.Geometry.Polygon(area) + + else: + # if the input geometry type is not + raise ValueError("geometry type not supported") + + def smap_ts(geo, start, end, var): + # extract a feature from the geometry + features = [ee.Feature(geo)] + # create a feature collection from the features + featureCollection = ee.FeatureCollection(features) + + def smap_ts_feature(feature): + area = feature.geometry() + # create the image collection + collection = ( + ee.ImageCollection("NASA_USDA/HSL/SMAP_soil_moisture") + .filterBounds(area) + .filterDate(start, end) + .select([var]) + ) + + def smap_ts_image(img): + # scale (int) Default: 30 + scale = 30 + # extract date from the image + dateinfo = 
ee.Date(img.get("system:time_start")).format("YYYY-MM-dd") + # reduce the region to a list, can be configured as per requirements + img = img.reduceRegion( + reducer=ee.Reducer.toList(), + geometry=area, + maxPixels=1e8, + scale=scale, + ) + # store data in an ee.Array + smapdata = ee.Array(img.get(var)) + tmpfeature = ( + ee.Feature(ee.Geometry.Point([0, 0])) + .set("smapdata", smapdata) + .set("dateinfo", dateinfo) + ) + return tmpfeature + + # map tmpfeature over the image collection + smap_timeseries = collection.map(smap_ts_image) + return feature.set( + "smapdata", smap_timeseries.aggregate_array("smapdata") + ).set("dateinfo", smap_timeseries.aggregate_array("dateinfo")) + + # map feature over featurecollection + featureCollection = featureCollection.map(smap_ts_feature).getInfo() + return featureCollection + + fc = smap_ts(geo=geo, start=start, end=end, var=var) + + def fc2dataframe(fc): + smapdatalist = [] + datelist = [] + # extract var and date data from fc dictionary and store in it in smapdatalist and datelist + for i in range(len(fc["features"][0]["properties"]["smapdata"])): + smapdatalist.append(fc["features"][0]["properties"]["smapdata"][i][0]) + datelist.append( + datetime.datetime.strptime( + (fc["features"][0]["properties"]["dateinfo"][i]).split(".")[0], + "%Y-%m-%d", + ) + ) + fc_dict = {"date": datelist, var: smapdatalist} + # create a pandas dataframe and store the data + fcdf = pd.DataFrame(fc_dict, columns=["date", var]) + return fcdf + + datadf = fc2dataframe(fc) + + site_name = df[df.columns[0]].iloc[0] + AOI = str(df[df.columns[1]].iloc[0]) + + # convert the dataframe to an xarray dataset, used for converting it to a netCDF file + tosave = xr.Dataset( + datadf, + attrs={ + "site_name": site_name, + "start_date": start, + "end_date": end, + "AOI": AOI, + "product": var, + }, + ) + + # # if specified output path does not exist create it + if not os.path.exists(outdir): + os.makedirs(outdir, exist_ok=True) + + file_name = "_" + var + # 
convert to netCDF and save the file + tosave.to_netcdf(os.path.join(outdir, site_name + file_name + ".nc")) diff --git a/modules/data.remote/inst/remote_process.py b/modules/data.remote/inst/remote_process.py new file mode 100644 index 00000000000..2d0de3b42f8 --- /dev/null +++ b/modules/data.remote/inst/remote_process.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +""" +remote_process controls the individual functions to create an automatic workflow for downloading and performing computation on remote sensing data. + +Requires Python3 + +Author: Ayush Prasad +""" + +from gee2pecan_bands import gee2pecan_bands +from bands2ndvi import bands2ndvi +from bands2lai_snap import bands2lai_snap +from satellitetools import gee +import geopandas as gpd + + +def remote_process( + geofile, + outdir, + start, + end, + qi_threshold, + source="gee", + collection="COPERNICUS/S2_SR", + input_type="bands", + output=["lai", "ndvi"], +): + """ + Parameters + ---------- + geofile (str) -- path to the file containing the name and coordinates of ROI, currently tested with geojson. + + outdir (str) -- path to the directory where the output file is stored. If specified directory does not exists, it is created. + + start (str) -- starting date of the data request in the form YYYY-MM-DD + + end (str) -- ending date areaof the data request in the form YYYY-MM-DD + + qi_threshold (float) -- Threshold value to filter images based on used qi filter. qi filter holds labels of classes whose percentages within the AOI is summed. If the sum is larger then the qi_threshold, data will not be retrieved for that date/image. The default is 1, meaning all data is retrieved + + source (str) -- source from where data is to be downloaded + + collection (str) -- dataset ID + + input_type (str) -- type of raw intermediate data + + output (list of str) -- type of output data requested + + Returns + ------- + Nothing: + output netCDF is saved in the specified directory. 
+ + Python dependencies required: earthengine-api, geopandas, pandas, netCDF4, xarray + + To test this function run: python3 remote_process.py + """ + + # this part will be removed in the next version, after deciding whether to pass the file or the extracted data to initial functions + df = gpd.read_file(geofile) + area = gee.AOI(df[df.columns[0]].iloc[0], df[df.columns[1]].iloc[0]) + + # selecting the initial data download function by concatenating source and input_type + initial_step = "".join([source, "2pecan", input_type]) + if initial_step == "gee2pecanbands": + if collection == "COPERNICUS/S2_SR": + gee2pecan_bands(geofile, outdir, start, end, qi_threshold) + else: + print("other gee download options go here, currently WIP") + # This should be a function being called from an another file + """ + data = ee.ImageCollection(collection) + filtered_data = (data.filterDate(start, end).select(bands).filterBounds(ee.Geometry(pathtofile)) + filtered_data = filtered_data.getInfo() + ... + """ + + else: + print("other sources like AppEEARS go here") + return + + # if raw data is the requested output, process is completed + if input_type == output: + print("process is complete") + + else: + # locate the raw data file formed in initial step + input_file = "".join([outdir, area.name, "_", str(input_type), ".nc"]) + + # store all the requested conversions in a list + conversions = [] + for conv_type in output: + conversions.append("".join([input_type, "2", conv_type])) + + # perform the available conversions + if "bands2lai" in conversions: + print("using SNAP to calculate LAI") + bands2lai_snap(input_file, outdir) + + if "bands2ndvi" in conversions: + print("using GEE to calculate NDVI") + bands2ndvi(input_file, outdir) + + +if __name__ == "__main__": + remote_process( + "./satellitetools/test.geojson", + "./out/", + "2019-01-01", + "2019-12-31", + 1, + "gee", + "COPERNICUS/S2_SR", + "bands", + ["lai", "ndvi"], + ) diff --git 
a/modules/data.remote/inst/satellitetools/biophys_xarray.py b/modules/data.remote/inst/satellitetools/biophys_xarray.py new file mode 100644 index 00000000000..ca63c18a062 --- /dev/null +++ b/modules/data.remote/inst/satellitetools/biophys_xarray.py @@ -0,0 +1,235 @@ +# -*- coding: utf-8 -*- +""" +Created on Mon May 11 14:34:08 2020 + +@author: Olli Nevalainen (olli.nevalainen@fmi.fi), + Finnish Meteorological Institute) + +Olli's python implementation of ESA SNAP s2toolbox biophysical processor and +computation of vegetation indices. +See ATBD at https://step.esa.int/docs/extra/ATBD_S2ToolBox_L2B_V1.1.pdf +And java source code at +https://github.com/senbox-org/s2tbx/tree/master/s2tbx-biophysical/src/main/java/org/esa/s2tbx/biophysical + +Caveats +Currently changes out of bounds inputs and outputs to nan (or min or max value +if output wihtin tolerance). Maybe output flagging information as well ( i.e. +diffferent flags input and output out of bounds). + +Convex hull input checking currently disabled. It's computationally slow and + not sure of its benefits. Better to filter out bad data based on L2A quality + info/classification\ + and hope averaging removes some bad pixels. +""" + +import requests +import io +import numpy as np +import xarray as xr + +# url to Sentinel 2 Toolbox's auxdata +# This base_url points towards the original toolbox(not the one created by Olli) +base_url = "https://raw.githubusercontent.com/senbox-org/s2tbx/master/s2tbx-biophysical/src/main/resources/auxdata/2_1/{}/{}" + + +def get_fromurl(var, pattern): + """ + Fetches the contents of a text file from the base url and stores it in a ndarray. + + Author: Ayush Prasad + + Parameters + ---------- + var (str) -- type of the product, one of FAPAR, FCOVER, LAI, LAI_Cab and LAI_Cw. + pattern (str) -- name of the file excluding the initial variable part. + + Returns + ------- + ndarray -- loaded with the contents of the text file. 
+ """ + # attach variable and file name to the base url + res_url = base_url.format(var, str(var) + "%s" % str(pattern)) + # make a GET request to the url to fetch the data. + res_url = requests.get(res_url) + # check the HTTP status code to see if any error has occured. + res_url.raise_for_status() + # store the contents of the url in an in-memory buffer and use it to load the ndarray. + return np.loadtxt(io.BytesIO(res_url.content), delimiter=",") + + +# Read SNAP Biophysical processor neural network parameters +nn_params = {} +for var in ["FAPAR", "FCOVER", "LAI", "LAI_Cab", "LAI_Cw"]: + norm_minmax = get_fromurl(var, "_Normalisation") + denorm_minmax = get_fromurl(var, "_Denormalisation") + layer1_weights = get_fromurl(var, "_Weights_Layer1_Neurons") + layer1_bias = get_fromurl(var, "_Weights_Layer1_Bias").reshape(-1, 1) + layer2_weights = get_fromurl(var, "_Weights_Layer2_Neurons").reshape(1, -1) + layer2_bias = get_fromurl(var, "_Weights_Layer2_Bias").reshape(1, -1) + extreme_cases = get_fromurl(var, "_ExtremeCases") + + if var == "FCOVER": + nn_params[var] = { + "norm_minmax": norm_minmax, + "denorm_minmax": denorm_minmax, + "layer1_weights": layer1_weights, + "layer1_bias": layer1_bias, + "layer2_weights": layer2_weights, + "layer2_bias": layer2_bias, + "extreme_cases": extreme_cases, + } + else: + defdom_min = get_fromurl(var, "_DefinitionDomain_MinMax")[0, :].reshape(-1, 1) + defdom_max = get_fromurl(var, "_DefinitionDomain_MinMax")[1, :].reshape(-1, 1) + defdom_grid = get_fromurl(var, "_DefinitionDomain_Grid") + nn_params[var] = { + "norm_minmax": norm_minmax, + "denorm_minmax": denorm_minmax, + "layer1_weights": layer1_weights, + "layer1_bias": layer1_bias, + "layer2_weights": layer2_weights, + "layer2_bias": layer2_bias, + "defdom_min": defdom_min, + "defdom_max": defdom_max, + "defdom_grid": defdom_grid, + "extreme_cases": extreme_cases, + } + + +def _normalization(x, x_min, x_max): + x_norm = 2 * (x - x_min) / (x_max - x_min) - 1 + return x_norm + + 
+def _denormalization(y_norm, y_min, y_max): + y = 0.5 * (y_norm + 1) * (y_max - y_min) + return y + + +def _input_ouf_of_range(x, variable): + x_copy = x.copy() + x_bands = x_copy[:8, :] + + # check min max domain + defdom_min = nn_params[variable]["defdom_min"][:, 0].reshape(-1, 1) + defdom_max = nn_params[variable]["defdom_max"][:, 0].reshape(-1, 1) + bad_input_mask = (x_bands < defdom_min) | (x_bands > defdom_max) + bad_vector = np.any(bad_input_mask, axis=0) + x_bands[:, bad_vector] = np.nan + + # convex hull check, currently disabled due to time consumption vs benefit + # gridProject = lambda v: np.floor(10 * (v - defdom_min) / (defdom_max - defdom_min) + 1 ).astype(int) + # x_bands = gridProject(x_bands) + # isInGrid = lambda v: any((v == x).all() for x in nn_params[variable]['defdom_grid']) + # notInGrid = ~np.array([isInGrid(v) for v in x_bands.T]) + # x[:,notInGrid | bad_vector] = np.nan + + x_copy[:, bad_vector] = np.nan + return x_copy + + +def _output_ouf_of_range(output, variable): + new_output = np.copy(output) + tolerance = nn_params[variable]["extreme_cases"][0] + output_min = nn_params[variable]["extreme_cases"][1] + output_max = nn_params[variable]["extreme_cases"][2] + + new_output[output < (output_min + tolerance)] = np.nan + new_output[(output > (output_min + tolerance)) & (output < output_min)] = output_min + new_output[(output < (output_max - tolerance)) & (output > output_max)] = output_max + new_output[output > (output_max - tolerance)] = np.nan + return new_output + + +def _compute_variable(x, variable): + + x_norm = np.zeros_like(x) + x = _input_ouf_of_range(x, variable) + x_norm = _normalization( + x, + nn_params[variable]["norm_minmax"][:, 0].reshape(-1, 1), + nn_params[variable]["norm_minmax"][:, 1].reshape(-1, 1), + ) + + out_layer1 = np.tanh( + nn_params[variable]["layer1_weights"].dot(x_norm) + + nn_params[variable]["layer1_bias"] + ) + out_layer2 = ( + nn_params[variable]["layer2_weights"].dot(out_layer1) + + 
nn_params[variable]["layer2_bias"] + ) + output = _denormalization( + out_layer2, + nn_params[variable]["denorm_minmax"][0], + nn_params[variable]["denorm_minmax"][1], + )[0] + output = _output_ouf_of_range(output, variable) + output = output.reshape(1, np.shape(x)[1]) + return output + + +def run_snap_biophys(dataset, variable): + """Compute specified variable using the SNAP algorithm. + + See ATBD at https://step.esa.int/docs/extra/ATBD_S2ToolBox_L2B_V1.1.pdf + + Parameters + ---------- + dataset : xr dataset + xarray dataset. + variable : str + Options 'FAPAR', 'FCOVER', 'LAI', 'LAI_Cab' or 'LAI_Cw' + + Returns + ------- + xarray dataset + Adds the specified variable array to dataset (variable name in + lowercase). + + """ + # generate view angle bands/layers + vz = ( + np.ones_like(dataset.band_data[:, 0, :, :]).T + * np.cos(np.radians(dataset.view_zenith)).values + ) + vz = vz[..., np.newaxis] + vzarr = xr.DataArray( + vz, + coords=[dataset.y, dataset.x, dataset.time, ["view_zenith"]], + dims=["y", "x", "time", "band"], + ) + + sz = ( + np.ones_like(dataset.band_data[:, 0, :, :]).T + * np.cos(np.radians(dataset.sun_zenith)).values + ) + sz = sz[..., np.newaxis] + szarr = xr.DataArray( + sz, + coords=[dataset.y, dataset.x, dataset.time, ["sun_zenith"]], + dims=["y", "x", "time", "band"], + ) + + raz = ( + np.ones_like(dataset.band_data[:, 0, :, :]).T + * np.cos(np.radians(dataset.sun_azimuth - dataset.view_azimuth)).values + ) + raz = raz[..., np.newaxis] + razarr = xr.DataArray( + raz, + coords=[dataset.y, dataset.x, dataset.time, ["relative_azimuth"]], + dims=["y", "x", "time", "band"], + ) + + newarr = xr.concat([dataset.band_data, vzarr, szarr, razarr], dim="band") + newarr = newarr.stack(xy=("x", "y")) + arr = xr.apply_ufunc( + _compute_variable, + newarr, + input_core_dims=[["band", "xy"]], + output_core_dims=[["xy"]], + kwargs={"variable": variable}, + vectorize=True, + ).unstack() + return dataset.assign({variable.lower(): arr}) diff --git 
a/modules/data.remote/inst/satellitetools/gee.py b/modules/data.remote/inst/satellitetools/gee.py new file mode 100755 index 00000000000..cb74eb74d5d --- /dev/null +++ b/modules/data.remote/inst/satellitetools/gee.py @@ -0,0 +1,716 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +Created on Thu Feb 6 15:24:12 2020 + +Module to retrieve Sentinel-2 data from Google Earth Engine (GEE). +Warning: the data is currently retrieved with 10m resolution (scale=10), so +the 20m resolution bands are resampled. +TODO: Add option for specifying the request spatial resolution. + +@author: Olli Nevalainen (olli.nevalainen@fmi.fi), + Finnish Meteorological Institute) + + +""" +import sys +import os +import ee +import datetime +import pandas as pd +import geopandas as gpd +import numpy as np +import xarray as xr +from functools import reduce + +ee.Initialize() + + +NO_DATA = -99999 +S2_REFL_TRANS = 10000 +# ----------------- Sentinel-2 ------------------------------------- +s2_qi_labels = ['NODATA', + 'SATURATED_DEFECTIVE', + 'DARK_FEATURE_SHADOW', + 'CLOUD_SHADOW', + 'VEGETATION', + 'NOT_VEGETATED', + 'WATER', + 'UNCLASSIFIED', + 'CLOUD_MEDIUM_PROBA', + 'CLOUD_HIGH_PROBA', + 'THIN_CIRRUS', + 'SNOW_ICE'] + +s2_filter1 = ['NODATA', + 'SATURATED_DEFECTIVE', + 'CLOUD_SHADOW', + 'UNCLASSIFIED', + 'CLOUD_MEDIUM_PROBA', + 'CLOUD_HIGH_PROBA', + 'THIN_CIRRUS', + 'SNOW_ICE'] + + +class S2RequestParams(): + """S2 data request paramaters. + + Attributes + ---------- + datestart : str + Starting date for data request in form "2019-01-01". + dateend : str + Starting date for data request in form "2019-12-31". + bands : list, optional + List of strings with band name. + the default is ['B3', 'B4', 'B5', + 'B6', 'B7', 'B8A', 'B11', 'B12']. + """ + + def __init__(self, datestart, dateend, bands=None): + """. + + Parameters + ---------- + datestart : str + Starting date for data request in form "2019-01-01". + dateend : str + Starting date for data request in form "2019-12-31". 
+ bands : list, optional + List of strings with band name. + The default is ['B3', 'B4', 'B5', + 'B6', 'B7', 'B8A', 'B11', 'B12']. + + Returns + ------- + None. + + """ + default_bands = ['B3', 'B4', 'B5', 'B6', 'B7', 'B8A', 'B11', 'B12'] + + self.datestart = datestart + self.dateend = dateend + self.bands = bands if bands else default_bands + + +class AOI(): + """Area of interest for area info and data. + + Attributes + ---------- + name : str + Name of the area. + geometry : str + Geometry of the area of interest e.g. from geopandas. + Currently only polygons tested. The default is None. + coordinate_list : list, optional + List of coordinates of a polygon + (loop should be closed). Computed from geometry if not + provided. The default is None. + tile : str, optional + Tile id as string for the data. Used to keep the data in + same crs because an area can be in multiple tiles with + different crs. The default is None. + qi : pandas dataframe + Dataframe with quality information about available imagery for the AOI. + qi is empty at init and can be computed with + ee_get_s2_quality_info function. + data : pandas dataframe or xarray + Dataframe holding data retrieved from GEE. Data can be computed using + function + qi is empty at init and can be computed with ee_get_s2_data and + converted to xarray using s2_data_to_xarray function. + + Methods + ------- + __init__ + """ + + def __init__(self, name, geometry=None, coordinate_list=None, tile=None): + """. + + Parameters + ---------- + name : str + Name of the area. + geometry : geometry in wkt, optional + Geometry of the area of interest e.g. from geopandas. + Currently only polygons tested. The default is None. + coordinate_list : list, optional + List of coordinates of a polygon + (loop should be closed). Computed from geometry if not + provided. The default is None. + tile : str, optional + Tile id as string for the data. 
Used to keep the data in + same crs because an area can be in multiple tiles with + different crs. The default is None. + + Returns + ------- + None. + + """ + if not geometry and not coordinate_list: + sys.exit("AOI has to get either geometry or coordinates as list!") + elif geometry and not coordinate_list: + coordinate_list = list(geometry.exterior.coords) + elif coordinate_list and not geometry: + geometry = None + + self.name = name + self.geometry = geometry + self.coordinate_list = coordinate_list + self.qi = None + self.data = None + self.tile = tile + + +def ee_get_s2_quality_info(AOIs, req_params): + """Get S2 quality information from GEE. + + Parameters + ---------- + AOIs : list or AOI instance + List of AOI instances or single AOI instance. If multiple AOIs + proviveded the computation in GEE server is parallellized. + If too many areas with long time range is provided, user might + hit GEE memory limits. Then you should call this function + sequentally to all AOIs. + req_params : S2RequestParams instance + S2RequestParams instance with request details. + + Returns + ------- + Nothing: + Computes qi attribute for the given AOI instances. 
+ + """ + # if single AOI instance, make a list + if isinstance(AOIs, AOI): + AOIs = list([AOIs]) + + features = [ee.Feature( + ee.Geometry.Polygon(a.coordinate_list), + {'name': a.name}) for a in AOIs] + feature_collection = ee.FeatureCollection(features) + + def ee_get_s2_quality_info_feature(feature): + + area = feature.geometry() + image_collection = ee.ImageCollection("COPERNICUS/S2_SR") \ + .filterBounds(area) \ + .filterDate(req_params.datestart, req_params.dateend) \ + .select(['SCL']) + + def ee_get_s2_quality_info_image(img): + productid = img.get('PRODUCT_ID') + assetid = img.id() + tileid = img.get('MGRS_TILE') + system_index = img.get('system:index') + proj = img.select("SCL").projection() + + # apply reducer to list + img = img.reduceRegion( + reducer=ee.Reducer.toList(), + geometry=area, + maxPixels=1e8, + scale=10) + + # get data into arrays + classdata = ee.Array( + ee.Algorithms.If(img.get("SCL"), + ee.Array(img.get("SCL")), + ee.Array([0]))) + + totalcount = classdata.length() + classpercentages = { + key: + classdata.eq(i).reduce(ee.Reducer.sum(), [0]) + .divide(totalcount).get([0]) + for i, key in enumerate(s2_qi_labels)} + + tmpfeature = ee.Feature(ee.Geometry.Point([0, 0])) \ + .set('productid', productid) \ + .set('system_index', system_index) \ + .set('assetid', assetid) \ + .set('tileid', tileid) \ + .set('projection', proj) \ + .set(classpercentages) + return tmpfeature + + s2_qi_image_collection = image_collection.map( + ee_get_s2_quality_info_image) + + return feature \ + .set('productid', s2_qi_image_collection + .aggregate_array('productid')) \ + .set('system_index', s2_qi_image_collection + .aggregate_array('system_index')) \ + .set('assetid', s2_qi_image_collection + .aggregate_array('assetid')) \ + .set('tileid', s2_qi_image_collection + .aggregate_array('tileid')) \ + .set('projection', s2_qi_image_collection + .aggregate_array('projection')) \ + .set({key: s2_qi_image_collection + .aggregate_array(key) for key in s2_qi_labels}) + 
+ s2_qi_feature_collection = feature_collection.map( + ee_get_s2_quality_info_feature).getInfo() + + s2_qi = s2_feature_collection_to_dataframes(s2_qi_feature_collection) + + for a in AOIs: + name = a.name + a.qi = s2_qi[name] + + +def ee_get_s2_data(AOIs, req_params, qi_threshold=0, qi_filter=s2_filter1): + """Get S2 data (level L2A, bottom of atmosphere data) from GEE. + + Warning: the data is currently retrieved with 10m resolution (scale=10), so + the 20m resolution bands are resampled. + TODO: Add option for specifying the request spatial resolution. + + Parameters + ---------- + AOIs : list or AOI instance + List of AOI instances or single AOI instance. If multiple AOIs + proviveded the computation in GEE server is parallellized. + If too many areas with long time range is provided, user might + hit GEE memory limits. Then you should call this function + sequentally to all AOIs. AOIs should have qi attribute computed first. + req_params : S2RequestParams instance + S2RequestParams instance with request details. + qi_threshold : float + Threshold value to filter images based on used qi filter. + qi filter holds labels of classes whose percentages within the AOI + is summed. If the sum is larger then the qi_threhold, data will not be + retrieved for that date/image. The default is 1, meaning all data is + retrieved. + qi_filter : list + List of strings with class labels (of unwanted classes) used to compute qi value, + see qi_threhold. The default is s2_filter1 = ['NODATA', + 'SATURATED_DEFECTIVE', + 'CLOUD_SHADOW', + 'UNCLASSIFIED', + 'CLOUD_MEDIUM_PROBA', + 'CLOUD_HIGH_PROBA', + 'THIN_CIRRUS', + 'SNOW_ICE']. + + Returns + ------- + Nothing: + Computes data attribute for the given AOI instances. + + """ + datestart = req_params.datestart + dateend = req_params. 
dateend + bands = req_params.bands + # if single AOI instance, make a list + if isinstance(AOIs, AOI): + AOIs = list([AOIs]) + + features = [] + for a in AOIs: + filtered_qi = filter_s2_qi_dataframe(a.qi, qi_threshold, qi_filter) + if len(filtered_qi) == 0: + print('No observations to retrieve for area %s' % a.name) + continue + + if a.tile is None: + min_tile = min(filtered_qi['tileid'].values) + filtered_qi = (filtered_qi[ + filtered_qi['tileid'] == min_tile]) + a.tile = min_tile + else: + filtered_qi = (filtered_qi[ + filtered_qi['tileid'] == a.tile]) + + full_assetids = "COPERNICUS/S2_SR/" + filtered_qi['assetid'] + image_list = [ee.Image(asset_id) for asset_id in full_assetids] + crs = filtered_qi['projection'].values[0]["crs"] + feature = ee.Feature(ee.Geometry.Polygon(a.coordinate_list), + {'name': a.name, + 'image_list': image_list}) + + features.append(feature) + + if len(features) == 0: + print('No data to be retrieved!') + return None + + feature_collection = ee.FeatureCollection(features) + + def ee_get_s2_data_feature(feature): + geom = feature.geometry(0.01, crs) + image_collection = \ + ee.ImageCollection.fromImages(feature.get('image_list')) \ + .filterBounds(geom) \ + .filterDate(datestart, dateend) \ + .select(bands + ['SCL']) + + def ee_get_s2_data_image(img): + # img = img.clip(geom) + productid = img.get('PRODUCT_ID') + assetid = img.id() + tileid = img.get('MGRS_TILE') + system_index = img.get('system:index') + proj = img.select(bands[0]).projection() + sun_azimuth = img.get('MEAN_SOLAR_AZIMUTH_ANGLE') + sun_zenith = img.get('MEAN_SOLAR_ZENITH_ANGLE') + view_azimuth = ee.Array( + [img.get('MEAN_INCIDENCE_AZIMUTH_ANGLE_%s' % b) + for b in bands]) \ + .reduce(ee.Reducer.mean(), [0]).get([0]) + view_zenith = ee.Array( + [img.get('MEAN_INCIDENCE_ZENITH_ANGLE_%s' % b) + for b in bands]) \ + .reduce(ee.Reducer.mean(), [0]).get([0]) + + img = img.resample('bilinear') \ + .reproject(crs=crs, scale=10) + + # get the lat lon and add the ndvi + 
image_grid = ee.Image.pixelCoordinates( + ee.Projection(crs)) \ + .addBands([img.select(b) for b in bands + ['SCL']]) + + # apply reducer to list + image_grid = image_grid.reduceRegion( + reducer=ee.Reducer.toList(), + geometry=geom, + maxPixels=1e8, + scale=10) + + # get data into arrays + x_coords = ee.Array(image_grid.get("x")) + y_coords = ee.Array(image_grid.get("y")) + band_data = {b: ee.Array(image_grid.get("%s" % b)) for b in bands} + + scl_data = ee.Array(image_grid.get("SCL")) + + # perform LAI et al. computation possibly here! + + tmpfeature = ee.Feature(ee.Geometry.Point([0, 0])) \ + .set('productid', productid) \ + .set('system_index', system_index) \ + .set('assetid', assetid) \ + .set('tileid', tileid) \ + .set('projection', proj) \ + .set('sun_zenith', sun_zenith) \ + .set('sun_azimuth', sun_azimuth) \ + .set('view_zenith', view_zenith) \ + .set('view_azimuth', view_azimuth) \ + .set('x_coords', x_coords) \ + .set('y_coords', y_coords) \ + .set('SCL', scl_data) \ + .set(band_data) + return tmpfeature + + s2_data_feature = image_collection.map(ee_get_s2_data_image) + + return feature \ + .set('productid', s2_data_feature + .aggregate_array('productid')) \ + .set('system_index', s2_data_feature + .aggregate_array('system_index')) \ + .set('assetid', s2_data_feature + .aggregate_array('assetid')) \ + .set('tileid', s2_data_feature + .aggregate_array('tileid')) \ + .set('projection', s2_data_feature + .aggregate_array('projection')) \ + .set('sun_zenith', s2_data_feature + .aggregate_array('sun_zenith')) \ + .set('sun_azimuth', s2_data_feature + .aggregate_array('sun_azimuth')) \ + .set('view_zenith', s2_data_feature + .aggregate_array('view_zenith')) \ + .set('view_azimuth', s2_data_feature + .aggregate_array('view_azimuth')) \ + .set('x_coords', s2_data_feature + .aggregate_array('x_coords')) \ + .set('y_coords', s2_data_feature + .aggregate_array('y_coords')) \ + .set('SCL', s2_data_feature + .aggregate_array('SCL')) \ + .set({b: s2_data_feature + 
.aggregate_array(b) for b in bands}) + s2_data_feature_collection = feature_collection.map( + ee_get_s2_data_feature).getInfo() + + s2_data = s2_feature_collection_to_dataframes(s2_data_feature_collection) + + for a in AOIs: + name = a.name + a.data = s2_data[name] + +def filter_s2_qi_dataframe(s2_qi_dataframe, qi_thresh, s2_filter=s2_filter1): + """Filter qi dataframe. + + Parameters + ---------- + s2_qi_dataframe : pandas dataframe + S2 quality information dataframe (AOI instance qi attribute). + qi_thresh : float + Threshold value to filter images based on used qi filter. + qi filter holds labels of classes whose percentages within the AOI + is summed. If the sum is larger then the qi_threhold, data will not be + retrieved for that date/image. The default is 1, meaning all data is + retrieved. + s2_filter : list + List of strings with class labels (of unwanted classes) used to compute qi value, + see qi_threhold. The default is s2_filter1 = ['NODATA', + 'SATURATED_DEFECTIVE', + 'CLOUD_SHADOW', + 'UNCLASSIFIED', + 'CLOUD_MEDIUM_PROBA', + 'CLOUD_HIGH_PROBA', + 'THIN_CIRRUS', + 'SNOW_ICE']. + + Returns + ------- + filtered_s2_qi_df : pandas dataframe + Filtered dataframe. + + """ + filtered_s2_qi_df = s2_qi_dataframe.loc[ + s2_qi_dataframe[s2_filter1].sum(axis=1) < qi_thresh] + + return filtered_s2_qi_df + + +def s2_feature_collection_to_dataframes(s2_feature_collection): + """Convert feature collection dict from GEE to pandas dataframe. + + Parameters + ---------- + s2_feature_collection : dict + Dictionary returned by GEE. + + Returns + ------- + dataframes : pandas dataframe + GEE dictinary converted to pandas dataframe. 
+ + """ + dataframes = {} + + for featnum in range(len(s2_feature_collection['features'])): + tmp_dict = {} + key = s2_feature_collection['features'][featnum]['properties']['name'] + productid = (s2_feature_collection + ['features'] + [featnum] + ['properties'] + ['productid']) + + dates = [datetime.datetime.strptime( + d.split('_')[2], '%Y%m%dT%H%M%S') for d in productid] + + tmp_dict.update({'Date': dates}) # , 'crs': crs} + properties = s2_feature_collection['features'][featnum]['properties'] + for prop, data in properties.items(): + if prop not in ['Date'] : # 'crs' ,, 'projection' + tmp_dict.update({prop: data}) + dataframes[key] = pd.DataFrame(tmp_dict) + return dataframes + +def compute_ndvi(dataset): + """Compute NDVI + + Parameters + ---------- + dataset : xarray dataset + + Returns + ------- + xarray dataset + Adds 'ndvi' xr array to xr dataset. + + """ + b4 = dataset.band_data.sel(band='B4') + b8 = dataset.band_data.sel(band='B8A') + ndvi = (b8 - b4) / (b8 + b4) + return dataset.assign({'ndvi': ndvi}) + + + +def s2_data_to_xarray(aoi, request_params, convert_to_reflectance=True): + """Convert AOI.data dataframe to xarray dataset. + + Parameters + ---------- + aoi : AOI instance + AOI instance. + request_params : S2RequestParams + S2RequestParams. + convert_to_reflectance : boolean, optional + Convert S2 data from GEE (integers) to reflectances (floats), + i,e, divide by 10000. + The default is True. + + Returns + ------- + Nothing. + Converts the data atrribute dataframe to xarray Dataset. + xarray is better for handling multiband data. It also has + implementation for saving the data in NetCDF format. + + """ + # check that all bands have full data! 
+ datalengths = [aoi.data[b].apply( + lambda d: len(d)) == len(aoi.data.iloc[0]['x_coords']) + for b in request_params.bands] + consistent_data = reduce(lambda a, b: a & b, datalengths) + aoi.data = aoi.data[consistent_data] + + # 2D data + bands = request_params.bands + + # 1D data + list_vars = ['assetid', 'productid', 'sun_azimuth', + 'sun_zenith', 'system_index', + 'view_azimuth', 'view_zenith'] + + # crs from projection + crs = aoi.data['projection'].values[0]['crs'] + tileid = aoi.data['tileid'].values[0] + # original number of pixels requested (pixels inside AOI) + aoi_pixels = len(aoi.data.iloc[0]['x_coords']) + + # transform 2D data to arrays + for b in bands: + + aoi.data[b] = aoi.data.apply( + lambda row: s2_lists_to_array( + row['x_coords'], row['y_coords'], row[b], + convert_to_reflectance=convert_to_reflectance), axis=1) + + aoi.data['SCL'] = aoi.data.apply( + lambda row: s2_lists_to_array( + row['x_coords'], row['y_coords'], row['SCL'], + convert_to_reflectance=False), axis=1) + + array = aoi.data[bands].values + + # this will stack the array to ndarray with + # dimension order = (time, band, x,y) + narray = np.stack( + [np.stack(array[:, b], axis=2) for b in range(len(bands))], + axis=2).transpose() # .swapaxes(2, 3) + + scl_array = np.stack(aoi.data['SCL'].values, axis=2).transpose() + + coords = {'time': aoi.data['Date'].values, + 'band': bands, + 'x': np.unique(aoi.data.iloc[0]['x_coords']), + 'y': np.unique(aoi.data.iloc[0]['y_coords']) + } + + dataset_dict = {'band_data': (['time', 'band', 'x', 'y'], narray), + 'SCL': (['time', 'x', 'y'], scl_array)} + var_dict = {var: (['time'], aoi.data[var]) for var in list_vars} + dataset_dict.update(var_dict) + + ds = xr.Dataset(dataset_dict, + coords=coords, + attrs={'name': aoi.name, + 'crs': crs, + 'tile_id': tileid, + 'aoi_geometry': aoi.geometry.to_wkt(), + 'aoi_pixels': aoi_pixels}) + aoi.data = ds + + +def s2_lists_to_array(x_coords, y_coords, data, convert_to_reflectance=True): + """Convert 1D 
lists of coordinates and corresponding values to 2D array. + + Parameters + ---------- + x_coords : list + List of x-coordinates. + y_coords : list + List of y-coordinates. + data : list + List of data values corresponding to the coordinates. + convert_to_reflectance : boolean, optional + Convert S2 data from GEE (integers) to reflectances (floats), + i,e, divide by 10000. + The default is True. + + Returns + ------- + arr : 2D numpy array + Return 2D numpy array. + + """ + # get the unique coordinates + uniqueYs = np.unique(y_coords) + uniqueXs = np.unique(x_coords) + + # get number of columns and rows from coordinates + ncols = len(uniqueXs) + nrows = len(uniqueYs) + + # determine pixelsizes + # ys = uniqueYs[1] - uniqueYs[0] + # xs = uniqueXs[1] - uniqueXs[0] + + y_vals, y_idx = np.unique(y_coords, return_inverse=True) + x_vals, x_idx = np.unique(x_coords, return_inverse=True) + if convert_to_reflectance: + arr = np.empty(y_vals.shape + x_vals.shape, dtype=np.float64) + arr.fill(np.nan) + arr[y_idx, x_idx] = np.array(data, dtype=np.float64) / S2_REFL_TRANS + else: + arr = np.empty(y_vals.shape + x_vals.shape, dtype=np.int32) + arr.fill(NO_DATA) # or whatever yor desired missing data flag is + arr[y_idx, x_idx] = data + arr = np.flipud(arr) + return arr + + +def xr_dataset_to_timeseries(xr_dataset, variables): + """Compute timeseries dataframe from xr dataset. + + Parameters + ---------- + xr_dataset : xarray dataset + + variables : list + list of variable names as string. + + Returns + ------- + df : pandas dataframe + Pandas dataframe with mean, std, se and percentage of NaNs inside AOI. 
+ + """ + df = pd.DataFrame({'Date': pd.to_datetime(xr_dataset.time.values)}) + + for var in variables: + df[var] = xr_dataset[var].mean(dim=['x', 'y']) + df[var+'_std'] = xr_dataset[var].std(dim=['x', 'y']) + + # nans occure due to missging data from 1D to 2D array + #(pixels outside the polygon), + # from snap algorihtm nans occure due to input/output ouf of bounds + # checking. + # TODO: flaggging with snap biophys algorith or some other solution to + # check which nan are from snap algorithm and which from 1d to 2d transformation + nans = np.isnan(xr_dataset[var]).sum(dim=['x', 'y']) + sample_n = len(xr_dataset[var].x) * len(xr_dataset[var].y) - nans + + # compute how many of the nans are inside aoi (due to snap algorithm) + out_of_aoi_pixels = (len(xr_dataset[var].x) * len(xr_dataset[var].y) + - xr_dataset.aoi_pixels) + nans_inside_aoi = nans - out_of_aoi_pixels + df['aoi_nan_percentage'] = nans_inside_aoi / xr_dataset.aoi_pixels + + df[var+'_se'] = df[var+'_std'] / np.sqrt(sample_n) + + return df diff --git a/modules/data.remote/inst/satellitetools/test.geojson b/modules/data.remote/inst/satellitetools/test.geojson new file mode 100644 index 00000000000..9a890595d4c --- /dev/null +++ b/modules/data.remote/inst/satellitetools/test.geojson @@ -0,0 +1,38 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": { + "name": "Reykjavik" + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -21.788935661315918, + 64.04460250271562 + ], + [ + -21.786317825317383, + 64.04460250271562 + ], + [ + -21.786317825317383, + 64.04537258754581 + ], + [ + -21.788935661315918, + 64.04537258754581 + ], + [ + -21.788935661315918, + 64.04460250271562 + ] + ] + ] + } + } + ] +} diff --git a/modules/data.remote/man/call_MODIS.Rd b/modules/data.remote/man/call_MODIS.Rd index cf54ceaf091..52fc0422110 100644 --- a/modules/data.remote/man/call_MODIS.Rd +++ b/modules/data.remote/man/call_MODIS.Rd @@ -5,39 +5,39 @@ \title{call_MODIS} 
\usage{ call_MODIS( - outdir = NULL, var, + product, + band, site_info, product_dates, + outdir = NULL, run_parallel = FALSE, ncores = NULL, - product, - band, package_method = "MODISTools", QC_filter = FALSE, progress = FALSE ) } \arguments{ -\item{outdir}{where the output file will be stored. Default is NULL} - \item{var}{the simple name of the modis dataset variable (e.g. lai)} +\item{product}{string value for MODIS product number} + +\item{band}{string value for which measurement to extract} + \item{site_info}{Bety list of site info for parsing MODIS data: list(site_id, site_name, lat, lon, time_zone)} \item{product_dates}{a character vector of the start and end date of the data in YYYYJJJ} +\item{outdir}{where the output file will be stored. Default is NULL and in this case only values are returned. When path is provided values are returned and written to disk.} + \item{run_parallel}{optional method to download data paralleize. Only works if more than 1 site is needed and there are >1 CPUs available.} \item{ncores}{number of cpus to use if run_parallel is set to TRUE. If you do not know the number of CPU's available, enter NULL.} -\item{product}{string value for MODIS product number} - -\item{band}{string value for which measurement to extract} - \item{package_method}{string value to inform function of which package method to use to download modis data. 
Either "MODISTools" or "reticulate" (optional)} @@ -64,14 +64,14 @@ site_info <- list( lon = 90, time_zone = "UTC") test_modistools <- call_MODIS( - outdir = NULL, var = "lai", + product = "MOD15A2H", + band = "Lai_500m", site_info = site_info, product_dates = c("2001150", "2001365"), + outdir = NULL, run_parallel = TRUE, ncores = NULL, - product = "MOD15A2H", - band = "Lai_500m", package_method = "MODISTools", QC_filter = TRUE, progress = FALSE) diff --git a/modules/emulator/DESCRIPTION b/modules/emulator/DESCRIPTION index 00204e8566c..cd10e25c424 100644 --- a/modules/emulator/DESCRIPTION +++ b/modules/emulator/DESCRIPTION @@ -13,7 +13,7 @@ Imports: mlegp, coda (>= 0.18), MASS, - tmvtnorm, + TruncatedNormal (>= 2.2), lqmm, MCMCpack Description: Implementation of a Gaussian Process model (both likelihood and @@ -21,4 +21,4 @@ Description: Implementation of a Gaussian Process model (both likelihood and for sampling design and prediction. License: BSD_3_clause + file LICENSE Encoding: UTF-8 -RoxygenNote: 7.0.2 +RoxygenNote: 7.1.0 diff --git a/modules/emulator/R/minimize.GP.R b/modules/emulator/R/minimize.GP.R index 79a92468729..6e1c4508306 100644 --- a/modules/emulator/R/minimize.GP.R +++ b/modules/emulator/R/minimize.GP.R @@ -271,7 +271,7 @@ mcmc.GP <- function(gp, x0, nmcmc, rng, format = "lin", mix = "joint", splinefcn } ## propose new parameters - xnew <- tmvtnorm::rtmvnorm(1, mean = c(xcurr), sigma = jcov, lower = rng[,1], upper = rng[,2]) + xnew <- TruncatedNormal::rtmvnorm(1, mu = c(xcurr), sigma = jcov, lb = rng[,1], ub = rng[,2]) # if(bounded(xnew,rng)){ # re-predict SS @@ -282,16 +282,16 @@ mcmc.GP <- function(gp, x0, nmcmc, rng, format = "lin", mix = "joint", splinefcn # don't update the currllp ( = llik.par, e.g. 
tau) yet # calculate posterior with xcurr | currllp ycurr <- get_y(currSS, xcurr, llik.fn, priors, currllp) - HRcurr <- tmvtnorm::dtmvnorm(c(xnew), c(xcurr), jcov, - lower = rng[,1], upper = rng[,2], log = TRUE) + HRcurr <- TruncatedNormal::dtmvnorm(c(xnew), c(xcurr), jcov, + lb = rng[,1], ub = rng[,2], log = TRUE, B = 1e2) newSS <- get_ss(gp, xnew, pos.check) if(all(newSS != -Inf)){ newllp <- pda.calc.llik.par(settings, n.of.obs, newSS, hyper.pars) ynew <- get_y(newSS, xnew, llik.fn, priors, newllp) - HRnew <- tmvtnorm::dtmvnorm(c(xcurr), c(xnew), jcov, - lower = rng[,1], upper = rng[,2], log = TRUE) + HRnew <- TruncatedNormal::dtmvnorm(c(xcurr), c(xnew), jcov, + lb = rng[,1], ub = rng[,2], log = TRUE, B = 1e2) if (is.accepted(ycurr+HRcurr, ynew+HRnew)) { xcurr <- xnew diff --git a/modules/meta.analysis/R/meta.analysis.R b/modules/meta.analysis/R/meta.analysis.R index 113c6e4b757..e572b6f5128 100644 --- a/modules/meta.analysis/R/meta.analysis.R +++ b/modules/meta.analysis/R/meta.analysis.R @@ -109,11 +109,10 @@ pecan.ma <- function(trait.data, prior.distns, ## check for excess missing data if (all(is.na(data[["obs.prec"]]))) { - if (verbose) { - writeLines("NO ERROR STATS PROVIDED, DROPPING RANDOM EFFECTS") - } - data$site <- rep(1, nrow(data)) - data$trt <- rep(0, nrow(data)) + PEcAn.logger::logger.warn("NO ERROR STATS PROVIDED\n Check meta-analysis Model Convergence", + "and consider turning off Random Effects by", + "setting FALSE", + "in your pecan.xml settings file ") } if (!random) { diff --git a/scripts/compile.sh b/scripts/compile.sh new file mode 100755 index 00000000000..462c2ac55aa --- /dev/null +++ b/scripts/compile.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +docker-compose exec executor sh -c 'cd /pecan && make' diff --git a/shiny/BrownDog/server.R b/shiny/BrownDog/server.R index bdf6cf74178..99ea2fbe366 100644 --- a/shiny/BrownDog/server.R +++ b/shiny/BrownDog/server.R @@ -33,9 +33,8 @@ server <- shinyServer(function(input, output, session) { 
output$modelSelector <- renderUI({ bety <- betyConnect("../../web/config.php") - con <- bety$con - on.exit(db.close(con), add = TRUE) - models <- db.query("SELECT name FROM modeltypes;", con) + on.exit(db.close(bety), add = TRUE) + models <- db.query("SELECT name FROM modeltypes;", bety) selectInput("model", "Model", models) }) @@ -75,8 +74,7 @@ server <- shinyServer(function(input, output, session) { observeEvent(input$type, { # get all sites name, lat and lon by sitegroups bety <- betyConnect("../../web/config.php") - con <- bety$con - on.exit(db.close(con), add = TRUE) + on.exit(db.close(bety), add = TRUE) sites <- db.query( paste0( @@ -87,7 +85,7 @@ server <- shinyServer(function(input, output, session) { input$type, "');" ), - con + bety ) if(length(sites) > 0){ diff --git a/shiny/ViewMet/server.R b/shiny/ViewMet/server.R index 4cbacdea701..3cb45c022e0 100644 --- a/shiny/ViewMet/server.R +++ b/shiny/ViewMet/server.R @@ -138,7 +138,7 @@ server <- function(input, output, session) { formatid <- tbl(bety, "inputs") %>% filter(id == inputid) %>% pull(format_id) siteid <- tbl(bety, "inputs") %>% filter(id == inputid) %>% pull(site_id) - site = query.site(con = bety$con, siteid) + site = query.site(con = bety, siteid) current_nc <- ncdf4::nc_open(rv$load.paths[i]) vars_in_file <- names(current_nc[["var"]]) diff --git a/shiny/dbsync/Dockerfile b/shiny/dbsync/Dockerfile index 2506d2236c5..d5a89a6ca0f 100644 --- a/shiny/dbsync/Dockerfile +++ b/shiny/dbsync/Dockerfile @@ -6,10 +6,16 @@ ENV PGHOST=postgres \ PGPASSWORD=bety \ GEOCACHE=/srv/shiny-server/geoip.json -RUN apt-get -y install libpq-dev libssl-dev \ +RUN apt-get update \ + && apt-get -y install libpq-dev libssl-dev \ && install2.r -e -s -n -1 curl dbplyr DT leaflet RPostgreSQL \ - && rm -rf /srv/shiny-server/* + && rm -rf /srv/shiny-server/* \ + && rm -rf /var/lib/apt/lists/* ADD . 
/srv/shiny-server/ +ADD https://raw.githubusercontent.com/rocker-org/shiny/master/shiny-server.sh /usr/bin/ + +RUN chmod +x /usr/bin/shiny-server.sh + # special script to start shiny server and preserve env variable CMD /srv/shiny-server/save-env-shiny.sh diff --git a/shiny/dbsync/app.R b/shiny/dbsync/app.R index 98fdda1bc21..da4f3065247 100644 --- a/shiny/dbsync/app.R +++ b/shiny/dbsync/app.R @@ -28,6 +28,9 @@ host_mapping <- list( "paleon-pecan.virtual.crc.nd.edu"="crc.nd.edu" ) +# ignored servers, is reset on refresh +ignored_servers <- c() + # given a IP address lookup geo spatital info # uses a cache to prevent to many requests (1000 per day) get_geoip <- function(ip) { @@ -55,6 +58,8 @@ get_geoip <- function(ip) { # get a list of all servers in BETY and their geospatial location get_servers <- function() { + ignored_servers <<- c() + # connect to BETYdb bety <- DBI::dbConnect( DBI::dbDriver("PostgreSQL"), @@ -104,16 +109,21 @@ get_servers <- function() { # fetch information from the actual servers check_servers <- function(servers, progress) { + check_servers <- servers$sync_url[! 
servers$sync_host_id %in% ignored_servers] + # generic failure message to increment progress failure <- function(res) { + print(res) progress$inc(amount = 1) } # version information server_version <- function(res) { + url <- sub("version.txt", "bety.tar.gz", res$url) progress$inc(amount = 0, message = paste("Processing", progress$getValue(), "of", progress$getMax())) - if (res$status == 200) { - url <- sub("version.txt", "bety.tar.gz", res$url) + print(paste(res$status, url)) + if (res$status == 200 || res$status == 226) { + check_servers <<- check_servers[check_servers != url] version <- strsplit(rawToChar(res$content), '\t', fixed = TRUE)[[1]] if (!is.na(as.numeric(version[1]))) { servers[servers$sync_url == url,'version'] <<- version[2] @@ -127,14 +137,15 @@ check_servers <- function(servers, progress) { } progress$inc(amount = 1) } - urls <- sapply(servers[,'sync_url'], function(x) { sub("bety.tar.gz", "version.txt", x) }) - lapply(urls, function(x) { curl::curl_fetch_multi(x, done = server_version, fail = failure, handle = curl::new_handle(connecttimeout=1)) }) + urls <- sapply(check_servers, function(x) { sub("bety.tar.gz", "version.txt", x) }) + lapply(urls, function(x) { curl::curl_fetch_multi(x, done = server_version, fail = failure) } ) # log information server_log <- function(res) { + url <- sub("sync.log", "bety.tar.gz", res$url) progress$inc(amount = 0, message = paste("Processing", progress$getValue(), "of", progress$getMax())) - if (res$status == 200) { - url <- sub("sync.log", "bety.tar.gz", res$url) + print(paste(res$status, url)) + if (res$status == 200 || res$status == 226) { lines <- strsplit(rawToChar(res$content), '\n', fixed = TRUE)[[1]] now <- as.POSIXlt(Sys.time(), tz="UTC") for (line in tail(lines, maxlines)) { @@ -152,12 +163,13 @@ check_servers <- function(servers, progress) { } progress$inc(amount = 1) } - urls <- sapply(servers[,'sync_url'], function(x) { sub("bety.tar.gz", "sync.log", x) }) - lapply(urls, function(x) { 
curl::curl_fetch_multi(x, done = server_log, fail = failure, handle = curl::new_handle(connecttimeout=1)) }) + urls <- sapply(check_servers, function(x) { sub("bety.tar.gz", "sync.log", x) }) + lapply(urls, function(x) { curl::curl_fetch_multi(x, done = server_log, fail = failure) } ) # run queries in parallel curl::multi_run() - myservers <<- servers + ignored_servers <<- c(ignored_servers, servers[servers$sync_url %in% check_servers, "sync_host_id"]) + return(servers) } @@ -257,12 +269,15 @@ server <- function(input, output, session) { # update sync list (slow) observeEvent(input$refresh_sync, { + servers <- values$servers session$sendCustomMessage("disableUI", "") - progress <- Progress$new(session, min=0, max=2*nrow(values$servers)) - values$servers <- check_servers(values$servers, progress) - values$sync <- check_sync(values$servers) + progress <- Progress$new(session, min=0, max=2*(nrow(servers)-length(ignored_servers))) + servers <- check_servers(servers, progress) + sync <- check_sync(servers) progress$close() session$sendCustomMessage("enableUI", "") + values$servers <- servers + values$sync <- sync }) # create a map of all servers that have a sync_host_id and sync_url @@ -282,7 +297,11 @@ server <- function(input, output, session) { # create a table of all servers that have a sync_host_id and sync_url output$table <- DT::renderDataTable({ - DT::datatable(values$servers %>% dplyr::select("sync_host_id", "hostname", "city", "country", "lastdump", "migrations")) + ignored <- rep("gray", length(ignored_servers) + 1) + DT::datatable(values$servers %>% + dplyr::select("sync_host_id", "hostname", "city", "country", "lastdump", "migrations"), + rownames = FALSE) %>% + DT::formatStyle('sync_host_id', target = "row", color = DT::styleEqual(c(ignored_servers, "-1"), ignored)) }) } diff --git a/tests/docker.sipnet.xml b/tests/docker.sipnet.xml new file mode 100644 index 00000000000..3da74156bc5 --- /dev/null +++ b/tests/docker.sipnet.xml @@ -0,0 +1,67 @@ + + + 
/data/tests/sipnet + + + + PostgreSQL + bety + bety + postgres + bety + FALSE + + + + + + temperate.coniferous + + + + + 3000 + FALSE + 1.2 + AUTO + + + + NPP + + + + + -1 + 1 + + NPP + + + + SIPNET + r136 + + + + + 772 + + + + 5000000005 + + + 2002-01-01 00:00:00 + 2005-12-31 00:00:00 + pecan/dbfiles + + + + localhost + + amqp://guest:guest@rabbitmq/%2F + SIPNET_r136 + + + diff --git a/web/04-runpecan.php b/web/04-runpecan.php index 16d2f44626c..2bbf990d70e 100644 --- a/web/04-runpecan.php +++ b/web/04-runpecan.php @@ -532,7 +532,11 @@ } # create the message - $message = '{"folder": "' . $folder . '", "workflowid": "' . $workflowid . '"}'; + $message = '{"folder": "' . $folder . '", "workflowid": "' . $workflowid . '"'; + if ($model_edit) { + $message .= ', "modeledit": true'; + } + $message .= '}'; send_rabbitmq_message($message, $rabbitmq_uri, $rabbitmq_queue); #done diff --git a/web/07-continue.php b/web/07-continue.php index 9b319de9799..6e5142c5a44 100644 --- a/web/07-continue.php +++ b/web/07-continue.php @@ -53,7 +53,6 @@ $stmt->closeCursor(); close_database(); -$exec = "R_LIBS_USER=\"$R_library_path\" $Rbinary CMD BATCH"; $path = "05-running.php?workflowid=$workflowid&hostname=${hostname}"; if ($pecan_edit) { $path .= "&pecan_edit=pecan_edit"; @@ -74,13 +73,6 @@ $fh = fopen($folder . DIRECTORY_SEPARATOR . "STATUS", 'a') or die("can't open file"); fwrite($fh, "\t" . date("Y-m-d H:i:s") . "\tDONE\t\n"); fclose($fh); - - $exec .= " --continue workflow.R workflow2.Rout"; -} else { - if ($model_edit) { - $exec .= " --advanced"; - } - $exec .= " workflow.R"; } # start the workflow again @@ -91,11 +83,28 @@ } else { $rabbitmq_queue = "pecan"; } - $msg_exec = str_replace("\"", "'", $exec); - $message = '{"folder": "' . $folder . '", "custom_application": "' . $msg_exec . '"}'; + + $message = '{"folder": "' . $folder . '", "workflowid": "' . $workflowid . '"'; + if (file_exists($folder . DIRECTORY_SEPARATOR . 
"STATUS")) { + $message .= ', "continue": true'; + } else if ($model_edit) { + $message .= ', "modeledit": true'; + } + $message .= '}'; send_rabbitmq_message($message, $rabbitmq_uri, $rabbitmq_queue); } else { chdir($folder); + + $exec = "R_LIBS_USER=\"$R_library_path\" $Rbinary CMD BATCH"; + if (file_exists($folder . DIRECTORY_SEPARATOR . "STATUS")) { + $exec .= " --continue workflow.R workflow2.Rout"; + } else { + if ($model_edit) { + $exec .= " --advanced"; + } + $exec .= " workflow.R"; + } + pclose(popen("$exec &", 'r')); }