diff --git a/.github/actions/check_changelog/package.json b/.github/actions/check_changelog/package.json index b5e5ad3213e5..cd086b5097a7 100644 --- a/.github/actions/check_changelog/package.json +++ b/.github/actions/check_changelog/package.json @@ -6,5 +6,5 @@ "@actions/exec": "1.1.1", "@actions/github": "6.0.0" }, - "packageManager": "yarn@4.0.2" + "packageManager": "yarn@4.1.0" } diff --git a/.github/actions/check_changelog/yarn.lock b/.github/actions/check_changelog/yarn.lock index 1a564f5c72f8..08e54ab4edcb 100644 --- a/.github/actions/check_changelog/yarn.lock +++ b/.github/actions/check_changelog/yarn.lock @@ -11,7 +11,7 @@ __metadata: dependencies: "@actions/http-client": "npm:^2.0.1" uuid: "npm:^8.3.2" - checksum: 7a61446697a23dcad3545cf0634dedbdedf20ae9a0ee6ee977554589a15deb4a93593ee48a41258933d58ce0778f446b0d2c162b60750956fb75e0b9560fb832 + checksum: 10c0/7a61446697a23dcad3545cf0634dedbdedf20ae9a0ee6ee977554589a15deb4a93593ee48a41258933d58ce0778f446b0d2c162b60750956fb75e0b9560fb832 languageName: node linkType: hard @@ -20,7 +20,7 @@ __metadata: resolution: "@actions/exec@npm:1.1.1" dependencies: "@actions/io": "npm:^1.0.1" - checksum: 4a09f6bdbe50ce68b5cf8a7254d176230d6a74bccf6ecc3857feee209a8c950ba9adec87cc5ecceb04110182d1c17117234e45557d72fde6229b7fd3a395322a + checksum: 10c0/4a09f6bdbe50ce68b5cf8a7254d176230d6a74bccf6ecc3857feee209a8c950ba9adec87cc5ecceb04110182d1c17117234e45557d72fde6229b7fd3a395322a languageName: node linkType: hard @@ -32,7 +32,7 @@ __metadata: "@octokit/core": "npm:^5.0.1" "@octokit/plugin-paginate-rest": "npm:^9.0.0" "@octokit/plugin-rest-endpoint-methods": "npm:^10.0.0" - checksum: 6f86f564e6ec5873c69ff23bed308cef5f964dbdb559c5415c1ba479517bf18352713a2a757c27f8f67a3d675fdd78446cf142b27762489f697edf9c58e72378 + checksum: 10c0/6f86f564e6ec5873c69ff23bed308cef5f964dbdb559c5415c1ba479517bf18352713a2a757c27f8f67a3d675fdd78446cf142b27762489f697edf9c58e72378 languageName: node linkType: hard @@ -42,28 +42,28 @@ __metadata: dependencies: tunnel: "npm:^0.0.6" undici: "npm:^5.25.4" - checksum: 868fe8529d78beb72f84ea2486e232fa6f66abe00d6ec4591b98c37e762c3d812868a3548638d75b49917961fd10ba1556916b47b1e9e4b55c266e2013c3ae8e + checksum: 10c0/868fe8529d78beb72f84ea2486e232fa6f66abe00d6ec4591b98c37e762c3d812868a3548638d75b49917961fd10ba1556916b47b1e9e4b55c266e2013c3ae8e languageName: node linkType: hard "@actions/io@npm:^1.0.1": version: 1.1.3 resolution: "@actions/io@npm:1.1.3" - checksum: 5b8751918e5bf0bebd923ba917fb1c0e294401e7ff0037f32c92a4efa4215550df1f6633c63fd4efb2bdaae8711e69b9e36925857db1f38935ff62a5c92ec29e + checksum: 10c0/5b8751918e5bf0bebd923ba917fb1c0e294401e7ff0037f32c92a4efa4215550df1f6633c63fd4efb2bdaae8711e69b9e36925857db1f38935ff62a5c92ec29e languageName: node linkType: hard "@fastify/busboy@npm:^2.0.0": version: 2.1.0 resolution: "@fastify/busboy@npm:2.1.0" - checksum: 7bb641080aac7cf01d88749ad331af10ba9ec3713ec07cabbe833908c75df21bd56249bb6173bdec07f5a41896b21e3689316f86684c06635da45f91ff4565a2 + checksum: 10c0/7bb641080aac7cf01d88749ad331af10ba9ec3713ec07cabbe833908c75df21bd56249bb6173bdec07f5a41896b21e3689316f86684c06635da45f91ff4565a2 languageName: node linkType: hard "@octokit/auth-token@npm:^4.0.0": version: 4.0.0 resolution: "@octokit/auth-token@npm:4.0.0" - checksum: 57acaa6c394c5abab2f74e8e1dcf4e7a16b236f713c77a54b8f08e2d14114de94b37946259e33ec2aab0566b26f724c2b71d2602352b59e541a9854897618f3c + checksum: 10c0/57acaa6c394c5abab2f74e8e1dcf4e7a16b236f713c77a54b8f08e2d14114de94b37946259e33ec2aab0566b26f724c2b71d2602352b59e541a9854897618f3c 
languageName: node linkType: hard @@ -78,7 +78,7 @@ __metadata: "@octokit/types": "npm:^12.0.0" before-after-hook: "npm:^2.2.0" universal-user-agent: "npm:^6.0.0" - checksum: a1d2882373b4a33cd9f6e56d76bcc82e5589a477829fc3491b1ef471a8a83fa437b339a2c76d97d9e8ea4ca12bf3ebf32e66119ba16977e542d98f1f5dd3c994 + checksum: 10c0/a1d2882373b4a33cd9f6e56d76bcc82e5589a477829fc3491b1ef471a8a83fa437b339a2c76d97d9e8ea4ca12bf3ebf32e66119ba16977e542d98f1f5dd3c994 languageName: node linkType: hard @@ -88,7 +88,7 @@ __metadata: dependencies: "@octokit/types": "npm:^12.0.0" universal-user-agent: "npm:^6.0.0" - checksum: f1c857c5d85afa9d7e8857f7f97dbec28d3b6ab1dc21fe35172f1bc9e5512c8a3a26edabf6b2d83bb60d700f7ad290c96be960496aa83606095630edfad06db4 + checksum: 10c0/f1c857c5d85afa9d7e8857f7f97dbec28d3b6ab1dc21fe35172f1bc9e5512c8a3a26edabf6b2d83bb60d700f7ad290c96be960496aa83606095630edfad06db4 languageName: node linkType: hard @@ -99,14 +99,14 @@ __metadata: "@octokit/request": "npm:^8.0.1" "@octokit/types": "npm:^12.0.0" universal-user-agent: "npm:^6.0.0" - checksum: 96e5d6b970be60877134cc147b9249534f3a79d691b9932d731d453426fa1e1a0a36111a1b0a6ab43d61309c630903a65db5559b5c800300dc26cf588f50fea8 + checksum: 10c0/96e5d6b970be60877134cc147b9249534f3a79d691b9932d731d453426fa1e1a0a36111a1b0a6ab43d61309c630903a65db5559b5c800300dc26cf588f50fea8 languageName: node linkType: hard "@octokit/openapi-types@npm:^19.1.0": version: 19.1.0 resolution: "@octokit/openapi-types@npm:19.1.0" - checksum: ae8081f52b797b91a12d4f6cddc475699c9d34b06645b337adc77d30b583d8fe8506597a45c42f8f1a96bfb2a9d092cee257d8a65d718bfeed23a0d153448eea + checksum: 10c0/ae8081f52b797b91a12d4f6cddc475699c9d34b06645b337adc77d30b583d8fe8506597a45c42f8f1a96bfb2a9d092cee257d8a65d718bfeed23a0d153448eea languageName: node linkType: hard @@ -117,7 +117,7 @@ __metadata: "@octokit/types": "npm:^12.4.0" peerDependencies: "@octokit/core": ">=5" - checksum: a17055dff8fde5ebc03bf935294ffa4605ed714cb15252f0fa63cda1b95e738fafb5ab9748b18fbdfa5615d5f6686cbf193c6d6426e7dc4fd1dda91c87263f3b + checksum: 10c0/a17055dff8fde5ebc03bf935294ffa4605ed714cb15252f0fa63cda1b95e738fafb5ab9748b18fbdfa5615d5f6686cbf193c6d6426e7dc4fd1dda91c87263f3b languageName: node linkType: hard @@ -128,7 +128,7 @@ __metadata: "@octokit/types": "npm:^12.3.0" peerDependencies: "@octokit/core": ">=5" - checksum: 4d00a2334753955f0c3841ba8fc0880c093b94838e011864ee737d958d2d64e3d45d34fa4c8b64bccf9e13c6de81318cbd6e2b24df37992941d12f54def28432 + checksum: 10c0/4d00a2334753955f0c3841ba8fc0880c093b94838e011864ee737d958d2d64e3d45d34fa4c8b64bccf9e13c6de81318cbd6e2b24df37992941d12f54def28432 languageName: node linkType: hard @@ -139,7 +139,7 @@ __metadata: "@octokit/types": "npm:^12.0.0" deprecation: "npm:^2.0.0" once: "npm:^1.4.0" - checksum: e72a4627120de345b54876a1f007664095e5be9d624fce2e14fccf7668cd8f5e4929d444d8fc085d48e1fb5cd548538453974aab129a669101110d6679dce6c6 + checksum: 10c0/e72a4627120de345b54876a1f007664095e5be9d624fce2e14fccf7668cd8f5e4929d444d8fc085d48e1fb5cd548538453974aab129a669101110d6679dce6c6 languageName: node linkType: hard @@ -151,7 +151,7 @@ __metadata: "@octokit/request-error": "npm:^5.0.0" "@octokit/types": "npm:^12.0.0" universal-user-agent: "npm:^6.0.0" - checksum: 0789edd3b600c5b7ca74089e2842b7bb679a0ad1ec56e5dda54f052d2dd266ac8e6e2eb3c34ba57962066f0770444bf1e99805fd2d762a47776f567beafcf038 + checksum: 10c0/0789edd3b600c5b7ca74089e2842b7bb679a0ad1ec56e5dda54f052d2dd266ac8e6e2eb3c34ba57962066f0770444bf1e99805fd2d762a47776f567beafcf038 languageName: node linkType: hard @@ -160,14 +160,14 @@ 
__metadata: resolution: "@octokit/types@npm:12.4.0" dependencies: "@octokit/openapi-types": "npm:^19.1.0" - checksum: b52b3fd8af307a1868846991f8376548a790814b20639dee1110271a768c0489081970df893ca2230f6285066003230d22f5877eeac90418971a475c79808241 + checksum: 10c0/b52b3fd8af307a1868846991f8376548a790814b20639dee1110271a768c0489081970df893ca2230f6285066003230d22f5877eeac90418971a475c79808241 languageName: node linkType: hard "before-after-hook@npm:^2.2.0": version: 2.2.3 resolution: "before-after-hook@npm:2.2.3" - checksum: 0488c4ae12df758ca9d49b3bb27b47fd559677965c52cae7b335784724fb8bf96c42b6e5ba7d7afcbc31facb0e294c3ef717cc41c5bc2f7bd9e76f8b90acd31c + checksum: 10c0/0488c4ae12df758ca9d49b3bb27b47fd559677965c52cae7b335784724fb8bf96c42b6e5ba7d7afcbc31facb0e294c3ef717cc41c5bc2f7bd9e76f8b90acd31c languageName: node linkType: hard @@ -184,7 +184,7 @@ __metadata: "deprecation@npm:^2.0.0": version: 2.3.1 resolution: "deprecation@npm:2.3.1" - checksum: 23d688ba66b74d09b908c40a76179418acbeeb0bfdf218c8075c58ad8d0c315130cb91aa3dffb623aa3a411a3569ce56c6460de6c8d69071c17fe6dd2442f032 + checksum: 10c0/23d688ba66b74d09b908c40a76179418acbeeb0bfdf218c8075c58ad8d0c315130cb91aa3dffb623aa3a411a3569ce56c6460de6c8d69071c17fe6dd2442f032 languageName: node linkType: hard @@ -193,14 +193,14 @@ __metadata: resolution: "once@npm:1.4.0" dependencies: wrappy: "npm:1" - checksum: 5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0 + checksum: 10c0/5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0 languageName: node linkType: hard "tunnel@npm:^0.0.6": version: 0.0.6 resolution: "tunnel@npm:0.0.6" - checksum: e27e7e896f2426c1c747325b5f54efebc1a004647d853fad892b46d64e37591ccd0b97439470795e5262b5c0748d22beb4489a04a0a448029636670bfd801b75 + checksum: 10c0/e27e7e896f2426c1c747325b5f54efebc1a004647d853fad892b46d64e37591ccd0b97439470795e5262b5c0748d22beb4489a04a0a448029636670bfd801b75 languageName: node linkType: hard @@ -209,14 +209,14 @@ __metadata: resolution: "undici@npm:5.28.3" dependencies: "@fastify/busboy": "npm:^2.0.0" - checksum: 3c559ae50ef3104b7085251445dda6f4de871553b9e290845649d2f80b06c0c9cfcdf741b0029c6b20d36c82e6a74dc815b139fa9a26757d70728074ca6d6f5c + checksum: 10c0/3c559ae50ef3104b7085251445dda6f4de871553b9e290845649d2f80b06c0c9cfcdf741b0029c6b20d36c82e6a74dc815b139fa9a26757d70728074ca6d6f5c languageName: node linkType: hard "universal-user-agent@npm:^6.0.0": version: 6.0.1 resolution: "universal-user-agent@npm:6.0.1" - checksum: 5c9c46ffe19a975e11e6443640ed4c9e0ce48fcc7203325757a8414ac49940ebb0f4667f2b1fa561489d1eb22cb2d05a0f7c82ec20c5cba42e58e188fb19b187 + checksum: 10c0/5c9c46ffe19a975e11e6443640ed4c9e0ce48fcc7203325757a8414ac49940ebb0f4667f2b1fa561489d1eb22cb2d05a0f7c82ec20c5cba42e58e188fb19b187 languageName: node linkType: hard @@ -225,13 +225,13 @@ __metadata: resolution: "uuid@npm:8.3.2" bin: uuid: dist/bin/uuid - checksum: bcbb807a917d374a49f475fae2e87fdca7da5e5530820ef53f65ba1d12131bd81a92ecf259cc7ce317cbe0f289e7d79fdfebcef9bfa3087c8c8a2fa304c9be54 + checksum: 10c0/bcbb807a917d374a49f475fae2e87fdca7da5e5530820ef53f65ba1d12131bd81a92ecf259cc7ce317cbe0f289e7d79fdfebcef9bfa3087c8c8a2fa304c9be54 languageName: node linkType: hard "wrappy@npm:1": version: 1.0.2 resolution: "wrappy@npm:1.0.2" - checksum: 56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 + checksum: 
10c0/56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 languageName: node linkType: hard diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f6f436c42bc..d3749b81c4f1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ ## Unreleased +## v7.0.0 + +- See https://github.com/redwoodjs/redwood/releases/tag/v7.0.0 for the release notes and https://community.redwoodjs.com/t/redwood-v7-0-0-upgrade-guide/5713 for the upgrade guide + ## v6.6.4 - See https://github.com/redwoodjs/redwood/releases/tag/v6.6.4 diff --git a/__fixtures__/fragment-test-project/api/package.json b/__fixtures__/fragment-test-project/api/package.json index 1fe562e03fbe..06193a97741f 100644 --- a/__fixtures__/fragment-test-project/api/package.json +++ b/__fixtures__/fragment-test-project/api/package.json @@ -3,8 +3,8 @@ "version": "0.0.0", "private": true, "dependencies": { - "@redwoodjs/api": "6.0.7", - "@redwoodjs/auth-dbauth-api": "6.0.7", - "@redwoodjs/graphql-server": "6.0.7" + "@redwoodjs/api": "7.0.0", + "@redwoodjs/auth-dbauth-api": "7.0.0", + "@redwoodjs/graphql-server": "7.0.0" } } diff --git a/__fixtures__/fragment-test-project/package.json b/__fixtures__/fragment-test-project/package.json index 3d2f18b5f1d5..eb82cb3cd099 100644 --- a/__fixtures__/fragment-test-project/package.json +++ b/__fixtures__/fragment-test-project/package.json @@ -7,8 +7,8 @@ ] }, "devDependencies": { - "@redwoodjs/core": "6.0.7", - "@redwoodjs/project-config": "6.0.7" + "@redwoodjs/core": "7.0.0", + "@redwoodjs/project-config": "7.0.0" }, "eslintConfig": { "extends": "@redwoodjs/eslint-config", @@ -20,5 +20,5 @@ "prisma": { "seed": "yarn rw exec seed" }, - "packageManager": "yarn@4.0.2" + "packageManager": "yarn@4.1.0" } diff --git a/__fixtures__/fragment-test-project/web/package.json b/__fixtures__/fragment-test-project/web/package.json index 089dd533bc0c..b75f3bedd4e3 100644 --- a/__fixtures__/fragment-test-project/web/package.json +++ b/__fixtures__/fragment-test-project/web/package.json @@ -11,16 +11,16 @@ ] }, "dependencies": { - "@redwoodjs/auth-dbauth-web": "6.0.7", - "@redwoodjs/forms": "6.0.7", - "@redwoodjs/router": "6.0.7", - "@redwoodjs/web": "6.0.7", + "@redwoodjs/auth-dbauth-web": "7.0.0", + "@redwoodjs/forms": "7.0.0", + "@redwoodjs/router": "7.0.0", + "@redwoodjs/web": "7.0.0", "humanize-string": "2.1.0", "react": "0.0.0-experimental-e5205658f-20230913", "react-dom": "0.0.0-experimental-e5205658f-20230913" }, "devDependencies": { - "@redwoodjs/vite": "6.0.7", + "@redwoodjs/vite": "7.0.0", "@types/react": "18.2.37", "@types/react-dom": "18.2.15", "autoprefixer": "^10.4.16", diff --git a/__fixtures__/test-project-rsa/package.json b/__fixtures__/test-project-rsa/package.json index d40992f2f0ac..2e99328ded69 100644 --- a/__fixtures__/test-project-rsa/package.json +++ b/__fixtures__/test-project-rsa/package.json @@ -19,7 +19,7 @@ "prisma": { "seed": "yarn rw exec seed" }, - "packageManager": "yarn@4.0.2", + "packageManager": "yarn@4.1.0", "resolutions": { "vite@4.4.9": "patch:vite@npm%3A4.4.9#./.yarn/patches/vite-npm-4.4.9-e845c1bbf8.patch" } diff --git a/__fixtures__/test-project/api/package.json b/__fixtures__/test-project/api/package.json index 1fe562e03fbe..06193a97741f 100644 --- a/__fixtures__/test-project/api/package.json +++ b/__fixtures__/test-project/api/package.json @@ -3,8 +3,8 @@ "version": "0.0.0", "private": true, "dependencies": { - "@redwoodjs/api": "6.0.7", - "@redwoodjs/auth-dbauth-api": "6.0.7", - "@redwoodjs/graphql-server": 
"6.0.7" + "@redwoodjs/api": "7.0.0", + "@redwoodjs/auth-dbauth-api": "7.0.0", + "@redwoodjs/graphql-server": "7.0.0" } } diff --git a/__fixtures__/test-project/package.json b/__fixtures__/test-project/package.json index e74028e8ed71..eb82cb3cd099 100644 --- a/__fixtures__/test-project/package.json +++ b/__fixtures__/test-project/package.json @@ -7,8 +7,8 @@ ] }, "devDependencies": { - "@redwoodjs/core": "6.0.7", - "@redwoodjs/project-config": "6.0.7" + "@redwoodjs/core": "7.0.0", + "@redwoodjs/project-config": "7.0.0" }, "eslintConfig": { "extends": "@redwoodjs/eslint-config", diff --git a/__fixtures__/test-project/web/package.json b/__fixtures__/test-project/web/package.json index f57f80d958ad..433398a0e250 100644 --- a/__fixtures__/test-project/web/package.json +++ b/__fixtures__/test-project/web/package.json @@ -11,16 +11,16 @@ ] }, "dependencies": { - "@redwoodjs/auth-dbauth-web": "6.0.7", - "@redwoodjs/forms": "6.0.7", - "@redwoodjs/router": "6.0.7", - "@redwoodjs/web": "6.0.7", + "@redwoodjs/auth-dbauth-web": "7.0.0", + "@redwoodjs/forms": "7.0.0", + "@redwoodjs/router": "7.0.0", + "@redwoodjs/web": "7.0.0", "humanize-string": "2.1.0", "react": "0.0.0-experimental-e5205658f-20230913", "react-dom": "0.0.0-experimental-e5205658f-20230913" }, "devDependencies": { - "@redwoodjs/vite": "6.0.7", + "@redwoodjs/vite": "7.0.0", "@types/react": "^18.2.55", "@types/react-dom": "^18.2.19", "autoprefixer": "^10.4.17", diff --git a/docs/docs/authentication.md b/docs/docs/authentication.md index cef3754ea978..88506057eb2f 100644 --- a/docs/docs/authentication.md +++ b/docs/docs/authentication.md @@ -5,7 +5,7 @@ description: Set up an authentication provider # Authentication Redwood has integrated auth end to end, from the web side to the api side. -On the web side, the router can protect pages via the `Private` component (or the `Set` component via the `private` prop), and even restrict access at the role-level. +On the web side, the router can protect pages via the `PrivateSet` component, and even restrict access at the role-level. And if you'd prefer to work with the primitives, the `useAuth` hook exposes all the pieces to build the experience you want. Likewise, the api side is locked down by default: all SDLs are generated with the `@requireAuth` directive, ensuring that making things publicly available is something that you opt in to rather than out of. @@ -117,11 +117,11 @@ Much of what the functions it returns do is self explanatory, but the options th ### Protecting routes -You can require that a user be authenticated to navigate to a route by wrapping it in the `Private` component or the `Set` component with the `private` prop set to `true`. +You can require that a user be authenticated to navigate to a route by wrapping it in the `PrivateSet` component. 
An unauthenticated user will be redirected to the route specified in either component's `unauthenticated` prop: ```tsx title="web/src/Routes.tsx" -import { Router, Route, Private } from '@redwoodjs/router' +import { Router, Route, PrivateSet } from '@redwoodjs/router' const Routes = () => { return ( @@ -129,21 +129,20 @@ const Routes = () => { - // highlight-start + // highlight-next-line - // highlight-end - + ) } ``` -You can also restrict access by role by passing a role or an array of roles to the `Private` or `Set` component's `hasRole` prop: +You can also restrict access by role by passing a role or an array of roles to the `PrivateSet` component's `hasRole` prop: ```tsx title="web/src/Routes.tsx" -import { Router, Route, Private, Set } from '@redwoodjs/router' +import { Router, Route, PrivateSet } from '@redwoodjs/router' const Routes = () => { return ( @@ -154,17 +153,17 @@ const Routes = () => { - + // highlight-next-line - + - + // highlight-next-line - + ) } diff --git a/docs/docs/docker.md b/docs/docs/docker.md index 33c2e65bd711..53a1c3aa0a38 100644 --- a/docs/docs/docker.md +++ b/docs/docs/docker.md @@ -637,10 +637,10 @@ await server.start() `start` is a thin wrapper around [`listen`](https://fastify.dev/docs/latest/Reference/Server/#listen). It takes the same arguments as `listen`, except for host and port. It computes those in the following way, in order of precedence: -1. `--host` or `--port` flags: +1. `--apiHost` or `--apiPort` flags: ``` - yarn node api/dist/server.js --host 0.0.0.0 --port 8913 + yarn node api/dist/server.js --apiHost 0.0.0.0 --apiPort 8913 ``` 2. `REDWOOD_API_HOST` or `REDWOOD_API_PORT` env vars: diff --git a/docs/docs/router.md b/docs/docs/router.md index f07df829f4ff..aa606ac91909 100644 --- a/docs/docs/router.md +++ b/docs/docs/router.md @@ -44,7 +44,7 @@ The `path` prop specifies the URL path to match, starting with the beginning sla ## Private Routes -Some pages should only be visible to authenticated users. +Some pages should only be visible to authenticated users. We support this using the `PrivateSet` component. Read more [further down](#privateset). ## Sets of Routes @@ -87,7 +87,7 @@ Conceptually, this fits with how we think about Context and Layouts as things th There's a lot of flexibility here. You can even nest `Sets` to great effect: ```jsx title="Routes.js" -import { Router, Route, Set, Private } from '@redwoodjs/router' +import { Router, Route, Set } from '@redwoodjs/router' import BlogContext from 'src/contexts/BlogContext' import BlogLayout from 'src/layouts/BlogLayout' import BlogNavLayout from 'src/layouts/BlogNavLayout' @@ -132,7 +132,7 @@ becomes... A `PrivateSet` makes all Routes inside that Set require authentication. When a user isn't authenticated and attempts to visit one of the Routes in the `PrivateSet`, they'll be redirected to the Route passed as the `PrivateSet`'s `unauthenticated` prop. The originally-requested Route's path is added to the query string as a `redirectTo` param. This lets you send the user to the page they originally requested once they're logged-in. -Here's an example of how you'd use a private set: +Here's an example of how you'd use a `PrivateSet`: ```jsx title="Routes.js" @@ -145,7 +145,7 @@ Here's an example of how you'd use a private set: For more fine-grained control, you can specify `roles` (which takes a string for a single role or an array of roles), and the router will check to see that the current user is authorized before giving them access to the Route. 
If they're not, they will be redirected to the page specified in the `unauthenticated` prop, such as a "forbidden" page. Read more about Role-based Access Control in Redwood [here](how-to/role-based-access-control.md). -To protect `Private` routes for access by a single role: +To protect private routes for access by a single role: ```jsx title="Routes.js" @@ -157,7 +157,7 @@ To protect `Private` routes for access by a single role: ``` -To protect `Private` routes for access by multiple roles: +To protect private routes for access by multiple roles: ```jsx title="Routes.js" @@ -613,7 +613,7 @@ Redwood will detect your explicit import and refrain from splitting that page in Because lazily-loaded pages can take a non-negligible amount of time to load (depending on bundle size and network connection), you may want to show a loading indicator to signal to the user that something is happening after they click a link. -In order to show a loader as your page chunks are loading, you simply add the `whileLoadingPage` prop to your route, `Set` or `Private` component. +In order to show a loader as your page chunks are loading, you simply add the `whileLoadingPage` prop to your route, `Set` or `PrivateSet` component. ```jsx title="Routes.js" import SkeletonLoader from 'src/components/SkeletonLoader' @@ -659,7 +659,7 @@ When the lazy-loaded page is loading, `PageLoadingContext.Consumer` will pass `{ Let's say you have a dashboard area on your Redwood app, which can only be accessed after logging in. When Redwood Router renders your private page, it will first fetch the user's details, and only render the page if it determines the user is indeed logged in. -In order to display a loader while auth details are being retrieved you can add the `whileLoadingAuth` prop to your private `` or `` component: +In order to display a loader while auth details are being retrieved you can add the `whileLoadingAuth` prop to your `PrivateSet` component: ```jsx //Routes.js @@ -675,7 +675,7 @@ In order to display a loader while auth details are being retrieved you can add {/* other routes */} - + ``` @@ -762,7 +762,7 @@ Note that if you're copy-pasting this example, it uses [Tailwind CSS](https://ta :::note Can I customize the development one? -As it's part of the RedwoodJS framework, you can't _change_ the dev fatal error page - but you can always build your own that takes the same props. If there's a feature you want to add to the built-in version, let us know on the [forums](https://community.redwoodjs.com/). +As it's part of the RedwoodJS framework, you can't _change_ the dev fatal error page, but you can always build your own that takes the same props. If there's a feature you want to add to the built-in version, let us know on the [forums](https://community.redwoodjs.com/). ::: diff --git a/docs/docs/tutorial/chapter0/what-is-redwood.md b/docs/docs/tutorial/chapter0/what-is-redwood.md index cae5b7f0565f..e779cafefdf2 100644 --- a/docs/docs/tutorial/chapter0/what-is-redwood.md +++ b/docs/docs/tutorial/chapter0/what-is-redwood.md @@ -31,7 +31,7 @@ You can start them both with a single command: `yarn redwood dev` When you open your web app in a browser, React does its thing initializing your app and monitoring the history for changes so that new content can be shown. Redwood features a custom, declarative Router that lets you specify URLs and the requisite pages (just a React component) will be shown. 
A simple routes file may look something like: ```jsx -import { Set, Router, Route } from '@redwoodjs/router' +import { Route, Router, Set, PrivateSet } from '@redwoodjs/router' import ApplicationLayout from 'src/layouts/ApplicationLayout' import { useAuth } from './auth' diff --git a/docs/docs/tutorial/chapter4/authentication.md b/docs/docs/tutorial/chapter4/authentication.md index 08bcc2228812..c42b3fb7d393 100644 --- a/docs/docs/tutorial/chapter4/authentication.md +++ b/docs/docs/tutorial/chapter4/authentication.md @@ -202,7 +202,7 @@ Going to the admin section now prevents a non-logged in user from seeing posts, ```jsx title="web/src/Routes.jsx" // highlight-next-line -import { Private, Router, Route, Set } from '@redwoodjs/router' +import { PrivateSet, Router, Route, Set } from '@redwoodjs/router' import ScaffoldLayout from 'src/layouts/ScaffoldLayout' import BlogLayout from 'src/layouts/BlogLayout' @@ -241,7 +241,7 @@ export default Routes ```jsx title="web/src/Routes.tsx" // highlight-next-line -import { Private, Router, Route, Set } from '@redwoodjs/router' +import { PrivateSet, Router, Route, Set } from '@redwoodjs/router' import ScaffoldLayout from 'src/layouts/ScaffoldLayout' import BlogLayout from 'src/layouts/BlogLayout' diff --git a/docs/docs/tutorial/chapter7/rbac.md b/docs/docs/tutorial/chapter7/rbac.md index 605409e86f0c..490b98a4d5ac 100644 --- a/docs/docs/tutorial/chapter7/rbac.md +++ b/docs/docs/tutorial/chapter7/rbac.md @@ -156,21 +156,21 @@ export const hasRole = (roles: AllowedRoles): boolean => { ### Restricting Access via Routes -The easiest way to prevent access to an entire URL is via the Router. The `` component takes a prop `roles` in which you can give a list of only those role(s) that should have access: +The easiest way to prevent access to an entire URL is via the Router. The `` component takes a prop `roles` in which you can give a list of only those role(s) that should have access: ```jsx title="web/src/Routes.jsx" // highlight-next-line - + - + ``` @@ -178,14 +178,14 @@ The easiest way to prevent access to an entire URL is via the Router. 
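As a hedged sketch of that `roles` prop in practice (the routes, pages, and role names below are purely illustrative):

```jsx
import { Router, Route, PrivateSet } from '@redwoodjs/router'

import { useAuth } from './auth'

const Routes = () => {
  return (
    <Router useAuth={useAuth}>
      <Route path="/" page={HomePage} name="home" />

      {/* A single role... */}
      <PrivateSet unauthenticated="home" roles="admin">
        <Route path="/admin/posts" page={PostsPage} name="posts" />
      </PrivateSet>

      {/* ...or any one of several roles */}
      <PrivateSet unauthenticated="home" roles={['admin', 'editor']}>
        <Route path="/admin/users" page={UsersPage} name="users" />
      </PrivateSet>
    </Router>
  )
}

export default Routes
```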
The ` + - + ``` diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts index 2ec259ca2192..ff65baf284ae 100644 --- a/docs/docusaurus.config.ts +++ b/docs/docusaurus.config.ts @@ -34,7 +34,7 @@ const config: Config = { indexName: 'learn-redwood', contextualSearch: true, searchParameters: {}, - externalUrlRegex: 'https://learn-redwood.netlify.app', + // externalUrlRegex: 'https://learn-redwood.netlify.app', }, navbar: { title: 'RedwoodJS', diff --git a/docs/netlify.toml b/docs/netlify.toml index 2f07a9e47bc2..760ef715d5ad 100644 --- a/docs/netlify.toml +++ b/docs/netlify.toml @@ -214,7 +214,7 @@ to = "/docs/authentication#self-hosted-auth-installation-and-setup" status = 301 -# v1.0-v1.5 redirects (to v1.x) +# v1.0-v1.5 redirects [[redirects]] from = "/docs/1.0/*" @@ -246,7 +246,7 @@ to = "/docs/1.x/:splat" status = 301 -# v2.0-v2.2 redirects (to v2.x) +# v2.0-v2.2 redirects [[redirects]] from = "/docs/2.0/*" @@ -263,7 +263,7 @@ to = "/docs/2.x/:splat" status = 301 -# v3.0-v3.2 redirects (to v3.x) +# v3.0-v3.2 redirects [[redirects]] from = "/docs/3.0/*" @@ -280,6 +280,102 @@ to = "/docs/3.x/:splat" status = 301 +# v4 redirects + +[[redirects]] + from = "/docs/4.0/*" + to = "/docs/4.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/4.1/*" + to = "/docs/4.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/4.2/*" + to = "/docs/4.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/4.3/*" + to = "/docs/4.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/4.4/*" + to = "/docs/4.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/4.5/*" + to = "/docs/4.x/:splat" + status = 301 + +# v5 redirects + +[[redirects]] + from = "/docs/5.0/*" + to = "/docs/5.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/5.1/*" + to = "/docs/5.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/5.2/*" + to = "/docs/5.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/5.3/*" + to = "/docs/5.x/:splat" + status = 301 + +[[redirects]] + from = "/docs/5.4/*" + to = "/docs/5.x/:splat" + status = 301 + +# v6.0-v6.6 redirects + +[[redirects]] + from = "/docs/6.0/*" + to = "/docs/:splat" + status = 301 + +[[redirects]] + from = "/docs/6.1/*" + to = "/docs/:splat" + status = 301 + +[[redirects]] + from = "/docs/6.2/*" + to = "/docs/:splat" + status = 301 + +[[redirects]] + from = "/docs/6.3/*" + to = "/docs/:splat" + status = 301 + +[[redirects]] + from = "/docs/6.4/*" + to = "/docs/:splat" + status = 301 + +[[redirects]] + from = "/docs/6.5/*" + to = "/docs/:splat" + status = 301 + +[[redirects]] + from = "/docs/6.6/*" + to = "/docs/:splat" + status = 301 + # Redirects for "Configuring Fastify" after the server file was released in v7 [[redirects]] @@ -296,3 +392,10 @@ from = "/docs/app-configuration-redwood-toml#how-to-configure-fastify-to-accept-file-uploads" to = "/docs/docker#configuring-the-server" status = 301 + +# This doc was moved in v7 as a part of https://github.com/redwoodjs/redwood/pull/9416. + +[[redirects]] + from = "/docs/mocking-graphql-requests" + to = "/docs/canary/graphql/mocking-graphql-requests" + status = 301 diff --git a/docs/versioned_docs/version-6.0/seo-head.md b/docs/versioned_docs/version-6.0/seo-head.md deleted file mode 100644 index 4b50350c6cdd..000000000000 --- a/docs/versioned_docs/version-6.0/seo-head.md +++ /dev/null @@ -1,152 +0,0 @@ ---- -description: Use meta tags to set page info for SEO ---- - -# SEO & Meta tags - -## Add app title -You certainly want to change the title of your Redwood app. 
-You can start by adding or modify `title` inside `redwood.toml` - -```diff -[web] -- title = "Redwood App" -+ title = "My Cool App" - port = 8910 - apiUrl = "/.redwood/functions" -``` -This title (the app title) is used by default for all your pages if you don't define another one. -It will also be use for the title template ! -### Title template -Now that you have the app title set, you probably want some consistence with the page title, that's what the title template is for. - -Add `titleTemplate` as a prop for `RedwoodProvider` to have a title template for every pages - -In _web/src/App.\{tsx,js\}_ -```diff -- -+ - /* ... */ - -``` - -You can write the format you like. - -_Examples :_ -```jsx -"%PageTitle | %AppTitle" => "Home Page | Redwood App" - -"%AppTitle · %PageTitle" => "Redwood App · Home Page" - -"%PageTitle : %AppTitle" => "Home Page : Redwood App" -``` - -So now in your page you only need to write the title of the page. - -## Adding to page `` -So you want to change the title of your page, or add elements to the `` of the page? We've got you! - - -Let's say you want to change the title of your About page, -Redwood provides a built in `` component, which you can use like this - - -In _AboutPage/AboutPage.\{tsx,js\}_ -```diff -+import { Head } from '@redwoodjs/web' - -const AboutPage = () => { - return ( -
-

AboutPage

-+ -+ About the team -+ -``` - -You can include any valid `` tag in here that you like, but just to make things easier we also have a utility component [MetaTags](#setting-meta-tags-open-graph-directives). - -### What about nested tags? -Redwood uses [react-helmet-async](https://github.com/staylor/react-helmet-async) underneath, which will use the tags furthest down your component tree. - -For example, if you set title in your Layout, and a title in your Page, it'll render the one in Page - this way you can override the tags you wish, while sharing the tags defined in Layout. - - -> **Side note** -> for these headers to appear to bots and scrapers e.g. for twitter to show your title, you have to make sure your page is prerendered -> If your content is static you can use Redwood's built in [Prerender](prerender.md). For dynamic tags, check the [Dynamic head tags](#dynamic-tags) - -## Setting meta tags / open graph directives -Often we want to set more than just the title - most commonly to set "og" headers. Og standing for -[open graph](https://ogp.me/) of course. - -Redwood provides a convenience component `` to help you get all the relevant tags with one go (but you can totally choose to do them yourself) - -Here's an example setting some common headers, including how to set an `og:image` -```jsx -import { MetaTags } from '@redwoodjs/web' - -const AboutPage = () => { - return ( -
-

AboutPage

- -

This is the about page!

-
- ) -} - -export default AboutPage -``` - -This is great not just for link unfurling on say Facebook or Slack, but also for SEO. Take a look at the [source](https://github.com/redwoodjs/redwood/blob/main/packages/web/src/components/MetaTags.tsx#L83) if you're curious what tags get set here. - - -## Dynamic tags -Great - so far we can see the changes, and bots will pick up our tags if we've prerendered the page, but what if I want to set the header based on the output of the Cell? - -> **Prerendering cells**
-> As of v3.x, Redwood supports prerendering your [Cells](https://redwoodjs.com/docs/cells) with the data you were querying. For more information please refer [to this section](https://redwoodjs.com/docs/prerender#cell-prerendering). - - -Let's say in our PostCell, we want to set the title to match the Post. -```jsx -import Post from 'src/components/Post/Post' - -export const QUERY = gql` - query FindPostById($id: Int!) { - post: post(id: $id) { - title - snippet - author { - name - } - } - } -` - -export const Loading = /* ... */ - -export const Empty = /* ... */ - -export const Success = ({ post }) => { - return ( - <> - - - - ) -} -``` -Once the success component renders, it'll update your page's title and set the relevant meta tags for you! diff --git a/docs/versioned_docs/version-6.0/toast-notifications.md b/docs/versioned_docs/version-6.0/toast-notifications.md deleted file mode 100644 index 0dab206bd67b..000000000000 --- a/docs/versioned_docs/version-6.0/toast-notifications.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -description: Toast notifications with react-hot-toast ---- - -# Toast Notifications - -Did you know that those little popup notifications that you sometimes see at the top of a page after you've performed an action are affectionately known as "toast" notifications? -Because they pop up like a piece of toast from a toaster! - -![Example toast animation](https://user-images.githubusercontent.com/300/110032806-71024680-7ced-11eb-8d69-7f462929815e.gif) - -Redwood supports these notifications out of the box thanks to the [react-hot-toast](https://react-hot-toast.com/) package. -We'll refer you to their [docs](https://react-hot-toast.com/docs) since they're very thorough, but here's enough to get you going. - -### Add the `Toaster` Component - -To render toast notifications, start by adding the `Toaster` component. -It's usually better to add it at the App or Layout-level than the Page: - -```jsx title="web/src/layouts/MainLayout/MainLayout.js" -// highlight-next-line -import { Toaster } from '@redwoodjs/web/toast' - -const MainLayout = ({ children }) => { - return ( - <> - // highlight-next-line - -
{children}
- - ) -} - -export default MainLayout -``` - -### Call the `toast` function - -To render a toast notification, call the `toast` function or one of its methods: - -```jsx title="web/src/components/PostForm/PostForm.js" -// highlight-next-line -import { toast } from '@redwoodjs/web/toast' - -// ... - -const PostForm = () => { - const onSubmit = () => { - try { - // Code to save a record... - // highlight-next-line - toast('User created!') - } catch (e) { - // There's also methods for default styling: - // highlight-next-line - toast.error("Error creating post...") - } - } - - return ( - // JSX... - ) -}) - -export default PostForm -``` diff --git a/docs/versioned_docs/version-6.0/a11y.md b/docs/versioned_docs/version-6.x/a11y.md similarity index 100% rename from docs/versioned_docs/version-6.0/a11y.md rename to docs/versioned_docs/version-6.x/a11y.md diff --git a/docs/versioned_docs/version-6.0/app-configuration-redwood-toml.md b/docs/versioned_docs/version-6.x/app-configuration-redwood-toml.md similarity index 99% rename from docs/versioned_docs/version-6.0/app-configuration-redwood-toml.md rename to docs/versioned_docs/version-6.x/app-configuration-redwood-toml.md index c009b12d1ac1..fee88a7b9cdc 100644 --- a/docs/versioned_docs/version-6.0/app-configuration-redwood-toml.md +++ b/docs/versioned_docs/version-6.x/app-configuration-redwood-toml.md @@ -290,7 +290,7 @@ api | 🗒 Custom api | "--------------------------e66d9a27b7c2b271\r\nContent-Disposition: attachment; name=\"image\"; filename=\"favicon.png\"\r\nContent-Type: image/png\r\n\r\n�PNG\r\n\u001a\n\u0000\u0000\u0000\rIHDR\u0000\u0000\u0000 \u0000\u0000\u0000`�\r\n--------------------------e66d9a27b7c2b271--\r\n" ``` -:::caution File uploads only work in a serverful deploy +:::warning File uploads only work in a serverful deploy Serverless functions on Netlify or Vercel do not use this Fastify configuration. They also have memory and execution time limits that don't lend themselves to handling file uploads of any practical size. diff --git a/docs/versioned_docs/version-6.0/assets-and-files.md b/docs/versioned_docs/version-6.x/assets-and-files.md similarity index 100% rename from docs/versioned_docs/version-6.0/assets-and-files.md rename to docs/versioned_docs/version-6.x/assets-and-files.md diff --git a/docs/versioned_docs/version-6.0/auth/auth0.md b/docs/versioned_docs/version-6.x/auth/auth0.md similarity index 94% rename from docs/versioned_docs/version-6.0/auth/auth0.md rename to docs/versioned_docs/version-6.x/auth/auth0.md index b7a47f6f1ae2..a028f9418152 100644 --- a/docs/versioned_docs/version-6.0/auth/auth0.md +++ b/docs/versioned_docs/version-6.x/auth/auth0.md @@ -28,7 +28,7 @@ But where in your Redwood app exactly? Auth0 needs to know, and this setting tells it. We'll keep things simple for now and make it "http://localhost:8910", but feel free to configure it as you wish. -Paste "http://localhost:8910" in the text area below "Allowed Callback URLs", then click "Save Changes" at the bottom of the page. +Paste "http://localhost:8910" in the text areas below "Allowed Callback URLs", "Allowed Logout URLs" and "Allowed Web Origins" then click "Save Changes" at the bottom of the page. Copy this one over to your project's `.env` file too, as `AUTH0_REDIRECT_URI`. Ok, just one more to go: under "Applications" in the nav on the left, click "APIs". 
diff --git a/docs/versioned_docs/version-6.0/auth/azure.md b/docs/versioned_docs/version-6.x/auth/azure.md similarity index 100% rename from docs/versioned_docs/version-6.0/auth/azure.md rename to docs/versioned_docs/version-6.x/auth/azure.md diff --git a/docs/versioned_docs/version-6.0/auth/clerk.md b/docs/versioned_docs/version-6.x/auth/clerk.md similarity index 93% rename from docs/versioned_docs/version-6.0/auth/clerk.md rename to docs/versioned_docs/version-6.x/auth/clerk.md index ed71df136f8e..06266d0b4774 100644 --- a/docs/versioned_docs/version-6.0/auth/clerk.md +++ b/docs/versioned_docs/version-6.x/auth/clerk.md @@ -4,7 +4,7 @@ sidebar_label: Clerk # Clerk Authentication -:::caution Did you set up Clerk a while ago? +:::warning Did you set up Clerk a while ago? If you set up Clerk a while ago, you may be using a deprecated `authDecoder` that's subject to rate limiting. This decoder will be removed in the next major. @@ -56,10 +56,15 @@ Lastly, in your project's `redwood.toml` file, include `CLERK_PUBLISHABLE_KEY` i ``` That should be enough; now, things should just work. -Let's make sure: if this is a brand new project, generate a home page. +Let's make sure: if this is a brand new project, generate a home page: + +```bash +yarn rw g page Home / +``` + There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: -```tsx title="web/src/pages/HomePage.tsx" +```tsx title="web/src/pages/HomePage/HomePage.tsx" import { useAuth } from 'src/auth' const HomePage = () => { @@ -76,11 +81,8 @@ const HomePage = () => { } ``` -Clicking sign up should open a sign-up box: - -image +Clicking sign up should open a sign-up box and after you sign up, you should see `{"isAuthenticated":true}` on the page. -After you sign up, you should see `{"isAuthenticated":true}` on the page. ## Customizing the session token diff --git a/docs/versioned_docs/version-6.0/auth/custom.md b/docs/versioned_docs/version-6.x/auth/custom.md similarity index 100% rename from docs/versioned_docs/version-6.0/auth/custom.md rename to docs/versioned_docs/version-6.x/auth/custom.md diff --git a/docs/versioned_docs/version-6.0/auth/dbauth.md b/docs/versioned_docs/version-6.x/auth/dbauth.md similarity index 99% rename from docs/versioned_docs/version-6.0/auth/dbauth.md rename to docs/versioned_docs/version-6.x/auth/dbauth.md index 01d8c1d28f1e..b87d4cf3a9f3 100644 --- a/docs/versioned_docs/version-6.0/auth/dbauth.md +++ b/docs/versioned_docs/version-6.x/auth/dbauth.md @@ -337,7 +337,7 @@ yarn rw g secret ``` Note that the secret that's output is _not_ appended to your `.env` file or anything else, it's merely output to the screen. You'll need to put it in the right place after that. -:::caution .env and Version Control +:::warning .env and Version Control The `.env` file is set to be ignored by git and not committed to version control. There is another file, `.env.defaults`, which is meant to be safe to commit and contain simple ENV vars that your dev team can share. The encryption key for the session cookie is NOT one of these shareable vars! @@ -475,7 +475,7 @@ model UserCredential { Run `yarn rw prisma migrate dev` to apply the changes to your database. -:::caution Do Not Allow GraphQL Access to `UserCredential` +:::warning Do Not Allow GraphQL Access to `UserCredential` As you can probably tell by the name, this new model contains secret credential info for the user. 
You **should not** make this data publicly available by adding an SDL file to `api/src/graphql`. diff --git a/docs/versioned_docs/version-6.0/auth/firebase.md b/docs/versioned_docs/version-6.x/auth/firebase.md similarity index 100% rename from docs/versioned_docs/version-6.0/auth/firebase.md rename to docs/versioned_docs/version-6.x/auth/firebase.md diff --git a/docs/versioned_docs/version-6.0/auth/netlify.md b/docs/versioned_docs/version-6.x/auth/netlify.md similarity index 100% rename from docs/versioned_docs/version-6.0/auth/netlify.md rename to docs/versioned_docs/version-6.x/auth/netlify.md diff --git a/docs/versioned_docs/version-6.0/auth/supabase.md b/docs/versioned_docs/version-6.x/auth/supabase.md similarity index 100% rename from docs/versioned_docs/version-6.0/auth/supabase.md rename to docs/versioned_docs/version-6.x/auth/supabase.md diff --git a/docs/versioned_docs/version-6.0/auth/supertokens.md b/docs/versioned_docs/version-6.x/auth/supertokens.md similarity index 58% rename from docs/versioned_docs/version-6.0/auth/supertokens.md rename to docs/versioned_docs/version-6.x/auth/supertokens.md index d7d37bd0d740..8b0b6b97ca8a 100644 --- a/docs/versioned_docs/version-6.0/auth/supertokens.md +++ b/docs/versioned_docs/version-6.x/auth/supertokens.md @@ -11,18 +11,49 @@ yarn rw setup auth supertokens ``` This installs all the packages, writes all the files, and makes all the code modifications you need. + +:::info + +You may have noticed that in `api/src/functions/auth.ts` there's an import from `'supertokens-node/framework/awsLambda'`. This is fine, even if your app isn't running in a serverless environment like AWS Lambda. In "serverful" environments, Redwood automatically handles the translation between Fastify's request and reply objects and functions' AWS Lambda signature. + +::: + For a detailed explanation of all the api- and web-side changes that aren't exclusive to SuperTokens, see the top-level [Authentication](../authentication.md) doc. For now, let's focus on SuperTokens's side of things. When you run the setup command it configures your app to support both email+password logins as well as social auth logins (Apple, GitHub and Google). Working with those social auth logins does require quite a few environment variables. And SuperTokens itself needs a couple variables too. Thankfully SuperTokens makes this very easy to setup as they provide values we can use for testing. -So just copy this to your project's `.env` file. +# Environment variables -```bash title=".env" +The environment variables have to be added either to your project's `.env` file (when running in development environment), or to the environment variables of your hosting provider (when running in production). + +## Base setup + +```bash +SUPERTOKENS_APP_NAME="Redwoodjs App" # this will be used in the email template for password reset or email verification emails. 
SUPERTOKENS_JWKS_URL=http://localhost:8910/.redwood/functions/auth/jwt/jwks.json +SUPERTOKENS_CONNECTION_URI=https://try.supertokens.io # set to the correct connection uri +``` + +## Production setup + +Assuming that your web side is hosted on `https://myapp.com`: + +```bash +SUPERTOKENS_WEBSITE_DOMAIN=https://myapp.com +SUPERTOKENS_JWKS_URL=https://myapp.com/.redwood/functions/auth/jwt/jwks.json +``` -SUPERTOKENS_CONNECTION_URI=https://try.supertokens.io +## Managed Supertokens service setup +```bash +SUPERTOKENS_API_KEY=your-api-key # The value can be omitted when self-hosting Supertokens +``` + +## Social login setup +The following environment variables have to be set up (depending on the social login options): + +```bash SUPERTOKENS_APPLE_CLIENT_ID=4398792-io.supertokens.example.service SUPERTOKENS_APPLE_SECRET_KEY_ID=7M48Y4RYDL SUPERTOKENS_APPLE_SECRET_PRIVATE_KEY=-----BEGIN PRIVATE KEY-----\nMIGTAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBHkwdwIBAQQgu8gXs+XYkqXD6Ala9Sf/iJXzhbwcoG5dMh1OonpdJUmgCgYIKoZIzj0DAQehRANCAASfrvlFbFCYqn3I2zeknYXLwtH30JuOKestDbSfZYxZNMqhF/OzdZFTV0zc5u5s3eN+oCWbnvl0hM+9IW0UlkdA\n-----END PRIVATE KEY----- @@ -33,7 +64,24 @@ SUPERTOKENS_GOOGLE_CLIENT_ID=1060725074195-kmeum4crr01uirfl2op9kd5acmi9jutn.apps SUPERTOKENS_GOOGLE_CLIENT_SECRET=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 ``` -That should be enough; now, things should just work. +## `redwood.toml` setup + +Make sure to modify `redwood.toml` to pass the required environment variables to the web side: + +```toml +[web] +... +includeEnvironmentVariables = [ + 'SUPERTOKENS_WEBSITE_DOMAIN', + 'SUPERTOKENS_API_DOMAIN', + 'SUPERTOKENS_API_GATEWAY_PATH', + 'SUPERTOKENS_APP_NAME' +] +``` + + +# Page setup + Let's make sure: if this is a brand new project, generate a home page. There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: @@ -65,3 +113,7 @@ Clicking sign up should navigate you to `/auth` where SuperToken's default login SuperTokens default UI After you sign up, you should be redirected back to your Redwood app, and you should see `{"isAuthenticated":true}` on the page. + +## Troubleshooting + +If going to `http://localhost:8910/auth` results in the plain Javascript file being served instead of the expected auth page, rename the `web/src/auth.tsx` file to `web/src/authentication.tsx`, and update the imports (related to https://github.com/redwoodjs/redwood/issues/9740). diff --git a/docs/versioned_docs/version-6.0/authentication.md b/docs/versioned_docs/version-6.x/authentication.md similarity index 96% rename from docs/versioned_docs/version-6.0/authentication.md rename to docs/versioned_docs/version-6.x/authentication.md index d026aa91bb17..88506057eb2f 100644 --- a/docs/versioned_docs/version-6.0/authentication.md +++ b/docs/versioned_docs/version-6.x/authentication.md @@ -5,7 +5,7 @@ description: Set up an authentication provider # Authentication Redwood has integrated auth end to end, from the web side to the api side. -On the web side, the router can protect pages via the `Private` component (or the `Set` component via the `private` prop), and even restrict access at the role-level. +On the web side, the router can protect pages via the `PrivateSet` component, and even restrict access at the role-level. And if you'd prefer to work with the primitives, the `useAuth` hook exposes all the pieces to build the experience you want. 
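As a rough sketch of working with those primitives in a component (the component name is made up, and the fields available on `currentUser` depend on your own `getCurrentUser` implementation):

```jsx
import { useAuth } from 'src/auth'

const AuthStatus = () => {
  const { isAuthenticated, currentUser, loading, logIn, logOut } = useAuth()

  // Auth state resolves asynchronously on first render
  if (loading) return null

  if (!isAuthenticated) {
    return <button onClick={() => logIn()}>Log in</button>
  }

  return (
    <div>
      {/* currentUser shape is whatever your getCurrentUser returns */}
      <span>Logged in as {currentUser?.email}</span>
      <button onClick={() => logOut()}>Log out</button>
    </div>
  )
}

export default AuthStatus
```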
Likewise, the api side is locked down by default: all SDLs are generated with the `@requireAuth` directive, ensuring that making things publicly available is something that you opt in to rather than out of. @@ -129,10 +129,8 @@ const Routes = () => { - // highlight-start + // highlight-next-line - {/* Or... */} - // highlight-end @@ -144,7 +142,7 @@ const Routes = () => { You can also restrict access by role by passing a role or an array of roles to the `PrivateSet` component's `hasRole` prop: ```tsx title="web/src/Routes.tsx" -import { Router, Route, PrivateSet, Set } from '@redwoodjs/router' +import { Router, Route, PrivateSet } from '@redwoodjs/router' const Routes = () => { return ( @@ -158,9 +156,9 @@ const Routes = () => { // highlight-next-line - + - + // highlight-next-line diff --git a/docs/versioned_docs/version-6.0/builds.md b/docs/versioned_docs/version-6.x/builds.md similarity index 100% rename from docs/versioned_docs/version-6.0/builds.md rename to docs/versioned_docs/version-6.x/builds.md diff --git a/docs/versioned_docs/version-6.0/cells.md b/docs/versioned_docs/version-6.x/cells.md similarity index 98% rename from docs/versioned_docs/version-6.0/cells.md rename to docs/versioned_docs/version-6.x/cells.md index 20c92ace2f37..0a7377cd6b54 100644 --- a/docs/versioned_docs/version-6.0/cells.md +++ b/docs/versioned_docs/version-6.x/cells.md @@ -409,4 +409,4 @@ export const Cell = () => { That's a lot of code. A lot of imperative code too. -We're basically just dumping the contents of [createCell.tsx](https://github.com/redwoodjs/redwood/blob/main/packages/web/src/components/createCell.tsx) into this file. Can you imagine having to do this every time you wanted to fetch data that might be delayed in responding? Yikes. +We're basically just dumping the contents of [createCell.tsx](https://github.com/redwoodjs/redwood/blob/main/packages/web/src/components/cell/createCell.tsx) into this file. Can you imagine having to do this every time you wanted to fetch data that might be delayed in responding? Yikes. diff --git a/docs/versioned_docs/version-6.0/cli-commands.md b/docs/versioned_docs/version-6.x/cli-commands.md similarity index 97% rename from docs/versioned_docs/version-6.0/cli-commands.md rename to docs/versioned_docs/version-6.x/cli-commands.md index c6fa2b18688c..0215aaa8b2ea 100644 --- a/docs/versioned_docs/version-6.0/cli-commands.md +++ b/docs/versioned_docs/version-6.x/cli-commands.md @@ -290,7 +290,7 @@ The following command will build, apply Prisma DB migrations, and skip data migr yarn redwood deploy netlify --no-data-migrate ``` -:::caution +:::warning While you may be tempted to use the [Netlify CLI](https://cli.netlify.com) commands to [build](https://cli.netlify.com/commands/build) and [deploy](https://cli.netlify.com/commands/deploy) your project directly from you local project directory, doing so **will lead to errors when deploying and/or when running functions**. I.e. errors in the function needed for the GraphQL server, but also other serverless functions. The main reason for this is that these Netlify CLI commands simply build and deploy -- they build your project locally and then push the dist folder. That means that when building a RedwoodJS project, the [Prisma client is generated with binaries matching the operating system at build time](https://cli.netlify.com/commands/link) -- and not the [OS compatible](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#binarytargets-options) with running functions on Netlify. 
Your Prisma client engine may be `darwin` for OSX or `windows` for Windows, but it needs to be `debian-openssl-1.1.x` or `rhel-openssl-1.1.x`. If the client is incompatible, your functions will fail. @@ -771,7 +771,7 @@ $ /redwood-app/node_modules/.bin/redwood g layout user Done in 1.00s. ``` -A layout will just export it's children: +A layout will just export its children: ```jsx title="./web/src/layouts/UserLayout/UserLayout.test.js" const UserLayout = ({ children }) => { @@ -1722,6 +1722,7 @@ yarn redwood setup | `deploy` | Set up a deployment configuration for a provider | | `generator` | Copy default Redwood generator templates locally for customization | | `i18n` | Set up i18n | +| `package` | Perform setup actions by running a third-party npm package | | `tsconfig` | Add relevant tsconfig so you can start using TypeScript | | `ui` | Set up a UI design or style library | | `webpack` | Set up a webpack config file in your project so you can add custom config | @@ -1779,7 +1780,7 @@ yarn redwood setup cache ### setup custom-web-index -:::caution This command only applies to projects using Webpack +:::warning This command only applies to projects using Webpack As of v6, all Redwood projects use Vite by default. When switching projects to Vite, we made the decision to add the entry file, `web/src/entry.client.{jsx,tsx}`, back to projects. @@ -1903,6 +1904,51 @@ In order to use [Netlify Dev](https://www.netlify.com/products/dev/) you need to > Note: To detect the RedwoodJS framework, please use netlify-cli v3.34.0 or greater. +### setup mailer + +This command adds the necessary packages and files to get started using the RedwoodJS mailer. By default it also creates an example mail template which can be skipped with the `--skip-examples` flag. + +``` +yarn redwood setup mailer +``` + +| Arguments & Options | Description | +| :---------------------- | :----------------------------- | +| `--force, -f` | Overwrite existing files | +| `--skip-examples` | Do not include example content, such as a React email template | + +### setup package + +This command takes a published npm package that you specify, performs some compatibility checks, and then executes its bin script. This allows you to use third-party packages that can provide you with an easy-to-use setup command for the particular functionality they provide. + +This command behaves similarly to `yarn dlx` but will attempt to confirm compatibility between the package you are attempting to run and the current version of Redwood you are running. You can bypass this check by passing the `--force` flag if you feel you understand any potential compatibility issues. + +``` +yarn redwood setup package +``` + +| Arguments & Options | Description | +| :------------------ | :----------------------- | +| `--force, -f` | Forgo compatibility checks | + +**Usage** + +Run the made up `@redwoodjs/setup-example` package: +```bash +~/redwood-app$ yarn rw setup package @redwoodjs/setup-example +``` + +Run the same package but using a particular npm tag and avoiding any compatibility checks: +```bash +~/redwood-app$ yarn rw setup package @redwoodjs/setup-example@beta --force +``` + +**Compatibility Checks** + +We perform a simple compatibility check in an attempt to make you aware of potential compatibility issues with setup packages you might wish to run. This works by examining the version of `@redwoodjs/core` you are using within your root `package.json`. 
We compare this value with a compatibility range the npm package specifies in the `engines.redwoodjs` field of its own `package.json`. If the version of `@redwoodjs/core` you are using falls outside of the compatibility range specified by the package you are attempting to run, we will warn you and ask you to confirm that you wish to continue. + +It's the author of the npm package's responsibility to specify the correct compatibility range, so **you should always research the packages you use with this command**. Especially since they will be executing code on your machine! + ### setup tsconfig Add a `tsconfig.json` to both the web and api sides so you can start using [TypeScript](typescript/index). diff --git a/docs/versioned_docs/version-6.0/connection-pooling.md b/docs/versioned_docs/version-6.x/connection-pooling.md similarity index 100% rename from docs/versioned_docs/version-6.0/connection-pooling.md rename to docs/versioned_docs/version-6.x/connection-pooling.md diff --git a/docs/versioned_docs/version-6.0/contributing-overview.md b/docs/versioned_docs/version-6.x/contributing-overview.md similarity index 100% rename from docs/versioned_docs/version-6.0/contributing-overview.md rename to docs/versioned_docs/version-6.x/contributing-overview.md diff --git a/docs/versioned_docs/version-6.0/contributing-walkthrough.md b/docs/versioned_docs/version-6.x/contributing-walkthrough.md similarity index 100% rename from docs/versioned_docs/version-6.0/contributing-walkthrough.md rename to docs/versioned_docs/version-6.x/contributing-walkthrough.md diff --git a/docs/versioned_docs/version-6.0/cors.md b/docs/versioned_docs/version-6.x/cors.md similarity index 100% rename from docs/versioned_docs/version-6.0/cors.md rename to docs/versioned_docs/version-6.x/cors.md diff --git a/docs/versioned_docs/version-6.0/create-redwood-app.md b/docs/versioned_docs/version-6.x/create-redwood-app.md similarity index 95% rename from docs/versioned_docs/version-6.0/create-redwood-app.md rename to docs/versioned_docs/version-6.x/create-redwood-app.md index 2f1e600f5eda..c6244ac0e837 100644 --- a/docs/versioned_docs/version-6.0/create-redwood-app.md +++ b/docs/versioned_docs/version-6.x/create-redwood-app.md @@ -24,7 +24,7 @@ node -v If you need to update your version of Node or run multiple versions of Node, we recommend installing nvm and have [documentation about how to get up and running.](./how-to/using-nvm) -You also need to have yarn version 1.15 or higher installed. To see what version of yarn you're running, you can run the following command in your terminal: +You also need to have yarn version 1.22.21 or higher installed. To see what version of yarn you're running, you can run the following command in your terminal: ```terminal yarn -v diff --git a/docs/versioned_docs/version-6.0/custom-web-index.md b/docs/versioned_docs/version-6.x/custom-web-index.md similarity index 96% rename from docs/versioned_docs/version-6.0/custom-web-index.md rename to docs/versioned_docs/version-6.x/custom-web-index.md index 775fa2c891a8..8fd30f548856 100644 --- a/docs/versioned_docs/version-6.0/custom-web-index.md +++ b/docs/versioned_docs/version-6.x/custom-web-index.md @@ -4,7 +4,7 @@ description: Change how App mounts to the DOM # Custom Web Index -:::caution This doc only applies to projects using Webpack +:::warning This doc only applies to projects using Webpack As of v6, all Redwood projects use Vite by default. 
When switching projects to Vite, we made the decision to add the entry file, `web/src/entry.client.{jsx,tsx}`, back to projects.
diff --git a/docs/versioned_docs/version-6.0/data-migrations.md b/docs/versioned_docs/version-6.x/data-migrations.md
similarity index 100%
rename from docs/versioned_docs/version-6.0/data-migrations.md
rename to docs/versioned_docs/version-6.x/data-migrations.md
diff --git a/docs/versioned_docs/version-6.0/deploy/baremetal.md b/docs/versioned_docs/version-6.x/deploy/baremetal.md
similarity index 97%
rename from docs/versioned_docs/version-6.0/deploy/baremetal.md
rename to docs/versioned_docs/version-6.x/deploy/baremetal.md
index 4703a7910a37..aec882f28381 100644
--- a/docs/versioned_docs/version-6.0/deploy/baremetal.md
+++ b/docs/versioned_docs/version-6.x/deploy/baremetal.md
@@ -22,7 +22,7 @@ Subsequent deploys:
yarn rw deploy baremetal production
```
-:::caution Deploying to baremetal is an advanced topic
+:::warning Deploying to baremetal is an advanced topic
If you haven't done any kind of remote server work before, you may be in a little over your head to start with. But don't worry: until relatively recently (cloud computing, serverless, lambda functions) this is how all websites were deployed, so we've got a good 30 years of experience getting this working!
@@ -173,7 +173,7 @@ This lists a single server, in the `production` environment, providing the hostn
* `branch` - [optional] The branch to deploy (defaults to `main`)
* `keepReleases` - [optional] The number of previous releases to keep on the server, including the one currently being served (defaults to 5)
-The easiest connection method is generally to include your own public key in the server's `~/.ssh/authorized_keys` file, [enable agent forwarding](https://docs.github.com/en/developers/overview/using-ssh-agent-forwarding), and then set `agentForward = true` in `deploy.toml`. This will allow you to use your own credentials when pulling code from GitHub (required for private repos). Otherwise you can create a [deploy key](https://docs.github.com/en/developers/overview/managing-deploy-keys) and keep it on the server.
+The easiest connection method is generally to include your own public key in the server's `~/.ssh/authorized_keys` file manually or by running `ssh-copy-id user@server.com` from your local machine, [enable agent forwarding](https://docs.github.com/en/developers/overview/using-ssh-agent-forwarding), and then set `agentForward = true` in `deploy.toml`. This will allow you to use your own credentials when pulling code from GitHub (required for private repos). Otherwise you can create a [deploy key](https://docs.github.com/en/developers/overview/managing-deploy-keys) and keep it on the server.
#### Using Environment Variables in `deploy.toml`
@@ -274,7 +274,7 @@ sudo chown deploy:deploy /var/www/myapp
You'll want to create an `.env` file in this directory containing any environment variables that are needed by your app (like `DATABASE_URL` at a minimum). This will be symlinked to each release directory so that it's available as the app expects (in the root directory of the codebase).
-:::caution SSH and Non-interactive Sessions
+:::warning SSH and Non-interactive Sessions
The deployment process uses a '[non-interactive](https://tldp.org/LDP/abs/html/intandnonint.html)' SSH session to run commands on the remote server. A non-interactive session will often load a minimal amount of settings for better compatibility and speed.
In some versions of Linux `.bashrc` by default does not load (by design) from a non-interactive session. This can lead to `yarn` (or other commands) not being found by the deployment script, even though they are in your path, because additional ENV vars are set in `~/.bashrc` which provide things like NPM paths and setup. @@ -418,7 +418,7 @@ pm2 startup You will see some output similar to the output below. We care about the output after "copy/paste the following command:" You'll need to do just that: copy the command starting with `sudo` and then paste and execute it. *Note* this command uses `sudo` so you'll need the root password to the machine in order for it to complete successfully. -:::caution +:::warning The below text is *example* output, yours will be different, don't copy and paste ours! @@ -467,7 +467,7 @@ You can define your before/after commands in three different places: * Environment specific - runs for only a single environment * Server specific - runs for only a single server in a single environment -:::caution +:::warning Custom commands are run in the new **deploy** directory, not the root of your application directory. During a deploy the `current` symlink will point to the previous directory while your code is executed in the new one, before the `current` symlink location is updated. diff --git a/docs/versioned_docs/version-6.0/deploy/coherence.md b/docs/versioned_docs/version-6.x/deploy/coherence.md similarity index 97% rename from docs/versioned_docs/version-6.0/deploy/coherence.md rename to docs/versioned_docs/version-6.x/deploy/coherence.md index 970eaa98fa31..a2b9ec845d2a 100644 --- a/docs/versioned_docs/version-6.0/deploy/coherence.md +++ b/docs/versioned_docs/version-6.x/deploy/coherence.md @@ -17,7 +17,7 @@ To deploy to Coherence, your Redwood project needs to be hosted on GitHub and yo ## Coherence Deploy -:::caution Prerender doesn't work with Coherence yet +:::warning Prerender doesn't work with Coherence yet You can see its current status and follow updates here on GitHub: https://github.com/redwoodjs/redwood/issues/8333. diff --git a/docs/versioned_docs/version-6.0/deploy/edgio.md b/docs/versioned_docs/version-6.x/deploy/edgio.md similarity index 100% rename from docs/versioned_docs/version-6.0/deploy/edgio.md rename to docs/versioned_docs/version-6.x/deploy/edgio.md diff --git a/docs/versioned_docs/version-6.0/deploy/flightcontrol.md b/docs/versioned_docs/version-6.x/deploy/flightcontrol.md similarity index 100% rename from docs/versioned_docs/version-6.0/deploy/flightcontrol.md rename to docs/versioned_docs/version-6.x/deploy/flightcontrol.md diff --git a/docs/versioned_docs/version-6.0/deploy/introduction.md b/docs/versioned_docs/version-6.x/deploy/introduction.md similarity index 100% rename from docs/versioned_docs/version-6.0/deploy/introduction.md rename to docs/versioned_docs/version-6.x/deploy/introduction.md diff --git a/docs/versioned_docs/version-6.0/deploy/netlify.md b/docs/versioned_docs/version-6.x/deploy/netlify.md similarity index 99% rename from docs/versioned_docs/version-6.0/deploy/netlify.md rename to docs/versioned_docs/version-6.x/deploy/netlify.md index 4f5c87e217bd..ad62b9b5d8f5 100644 --- a/docs/versioned_docs/version-6.0/deploy/netlify.md +++ b/docs/versioned_docs/version-6.x/deploy/netlify.md @@ -13,7 +13,7 @@ If you simply want to experience the Netlify deployment process without a databa 3. run the command `yarn rw setup deploy netlify` and commit and push changes 4. 
use the Netlify [Quick Start](https://app.netlify.com/signup) to deploy
-:::caution
+:::warning
While you may be tempted to use the [Netlify CLI](https://cli.netlify.com) commands to [build](https://cli.netlify.com/commands/build) and [deploy](https://cli.netlify.com/commands/deploy) your project directly from your local project directory, doing so **will lead to errors when deploying and/or when running functions**. That is, errors in the function needed for the GraphQL server, but also in other serverless functions.
The main reason for this is that these Netlify CLI commands simply build and deploy -- they build your project locally and then push the dist folder. That means that when building a RedwoodJS project, the [Prisma client is generated with binaries matching the operating system at build time](https://cli.netlify.com/commands/link) -- and not the [OS compatible](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#binarytargets-options) with running functions on Netlify.
Your Prisma client engine may be `darwin` for OSX or `windows` for Windows, but it needs to be `debian-openssl-1.1.x` or `rhel-openssl-1.1.x`. If the client is incompatible, your functions will fail.
diff --git a/docs/versioned_docs/version-6.0/deploy/render.md b/docs/versioned_docs/version-6.x/deploy/render.md
similarity index 100%
rename from docs/versioned_docs/version-6.0/deploy/render.md
rename to docs/versioned_docs/version-6.x/deploy/render.md
diff --git a/docs/versioned_docs/version-6.0/deploy/serverless.md b/docs/versioned_docs/version-6.x/deploy/serverless.md
similarity index 100%
rename from docs/versioned_docs/version-6.0/deploy/serverless.md
rename to docs/versioned_docs/version-6.x/deploy/serverless.md
diff --git a/docs/versioned_docs/version-6.0/deploy/vercel.md b/docs/versioned_docs/version-6.x/deploy/vercel.md
similarity index 88%
rename from docs/versioned_docs/version-6.0/deploy/vercel.md
rename to docs/versioned_docs/version-6.x/deploy/vercel.md
index 38beee72ec6d..5f4e6e33fe04 100644
--- a/docs/versioned_docs/version-6.0/deploy/vercel.md
+++ b/docs/versioned_docs/version-6.x/deploy/vercel.md
@@ -73,3 +73,18 @@ Go ahead, click that "Visit" button. You’ve earned it 🎉
From the Vercel Dashboard you can access the full settings and information for your Redwood App. The default settings seem to work just fine for most Redwood projects. Do take a look around, but be sure to check out the [docs as well](https://vercel.com/docs).
From now on, each time you push code to your git repo, Vercel will automatically trigger a deploy of the new code. You can also manually redeploy if you select "Deployments", then the specific deployment from the list, and finally the "Redeploy" option from the vertical dots menu next to "Visit".
+
+## vercel.json configuration
+
+By default, API requests in Vercel have a timeout limit of 15 seconds. To extend it, add the code snippet below to your `vercel.json` file. Note that increasing the timeout is available only to Pro plan subscribers, and it can be raised to a maximum of 300 seconds (5 minutes).
+
+```
+{
+  "functions": {
+    "api/src/functions/graphql.*": {
+      "maxDuration": 120,
+      "runtime": "@vercel/redwood@2.0.5"
+    }
+  }
+}
+```
diff --git a/docs/versioned_docs/version-6.0/directives.md b/docs/versioned_docs/version-6.x/directives.md
similarity index 100%
rename from docs/versioned_docs/version-6.0/directives.md
rename to docs/versioned_docs/version-6.x/directives.md
diff --git a/docs/versioned_docs/version-6.x/docker.md b/docs/versioned_docs/version-6.x/docker.md
new file mode 100644
index 000000000000..2a3587bba87a
--- /dev/null
+++ b/docs/versioned_docs/version-6.x/docker.md
@@ -0,0 +1,468 @@
+---
+description: Redwood's Dockerfile
+---
+
+# Docker
+
+:::note The Dockerfile is experimental
+
+Redwood's Dockerfile is the collective effort of several hard-working community members.
+We've worked hard to optimize it, but expect changes as we collaborate with users and deploy providers.
+
+:::
+
+If you're not familiar with Docker, we recommend going through their [getting started](https://docs.docker.com/get-started/) documentation.
+
+## Set up
+
+To get started, run the setup command:
+
+```
+yarn rw experimental setup-docker
+```
+
+The setup command does several things:
+- writes four files: `Dockerfile`, `.dockerignore`, `docker-compose.dev.yml`, and `docker-compose.prod.yml`
+- adds the `@redwoodjs/api-server` and `@redwoodjs/web-server` packages to the api and web sides respectively
+- edits the `browser.open` setting in the `redwood.toml` (right now, if it's set to `true`, it'll break the dev server when running the `docker-compose.dev.yml`)
+
+## Usage
+
+You can start the dev compose file with:
+
+```
+docker compose -f ./docker-compose.dev.yml up
+```
+
+And the prod compose file with:
+
+```
+docker compose -f ./docker-compose.prod.yml up
+```
+
+:::info make sure to specify build args
+
+If your api side or web side depends on env vars at build time, you may need to supply them as `--build-args`, or in the compose files.
+
+This is often the most tedious part of setting up Docker. Have ideas of how it could be better? Let us know on the [forums](https://community.redwoodjs.com/)!
+
+:::
+
+The first time you do this, you'll have to use the `console` stage to go in and migrate the database—just like you would with a Redwood app on your machine:
+
+```
+docker compose -f ./docker-compose.dev.yml run --rm -it console /bin/bash
+root@...:/home/node/app# yarn rw prisma migrate dev
+```
+
+## The Dockerfile in detail
+
+The documentation here goes through and explains every line of Redwood's Dockerfile.
+If you'd like to see the whole Dockerfile for reference, you can find it [here](https://github.com/redwoodjs/redwood/tree/main/packages/cli/src/commands/experimental/templates/docker/Dockerfile) or by setting it up in your project: `yarn rw experimental setup-docker`.
+
+Redwood takes advantage of [Docker's multi-stage build support](https://docs.docker.com/build/building/multi-stage/) to keep the final production images lean.
+
+### The `base` stage
+
+The `base` stage installs dependencies.
+It's used as the base image for the build stages and the `console` stage.
+
+```Dockerfile
+FROM node:18-bookworm-slim as base
+```
+
+We use a Node.js 18 image as the base image because that's the version Redwood targets.
+"bookworm" is the codename for the current stable distribution of Debian (version 12).
+Lastly, the "slim" variant of the `node:18-bookworm` image only includes what Node.js needs, which reduces the image's size while making it more secure.
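+
+If you're curious about what the base image actually contains, you can poke at it directly. A quick check (these commands just run the stock Node image and aren't Redwood-specific):
+
+```bash
+# Confirm the Debian release the image is built on (bookworm / Debian 12)
+docker run --rm node:18-bookworm-slim cat /etc/os-release
+
+# Confirm the Node.js version
+docker run --rm node:18-bookworm-slim node --version
+```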
+ +:::tip Why not alpine? + +While alpine may be smaller, it uses musl, a different C standard library. +In developing this Dockerfile, we prioritized security over size. + +If you know what you're doing feel free to change this—it's your Dockerfile now! +Just remember to change the `apt-get` instructions further down too if needed. + +::: + +Moving on, next we have `corepack enable`: + +```Dockerfile +RUN corepack enable +``` + +[Corepack](https://nodejs.org/docs/latest-v18.x/api/corepack.html), Node's manager for package managers, needs to be enabled so that Yarn can use the `packageManager` field in your project's root `package.json` to pick the right version of itself. +If you'd rather check in the binary, you still can, but you'll need to remember to copy it over (i.e. `COPY --chown=node:node .yarn/releases .yarn/releases`). + +```Dockerfile +RUN apt-get update && apt-get install -y \ + openssl \ + # python3 make gcc \ + && rm -rf /var/lib/apt/lists/* +``` + +The `node:18-bookworm-slim` image doesn't have [OpenSSL](https://www.openssl.org/), which [seems to be a bug](https://github.com/nodejs/docker-node/issues/1919). +(It was included in the "bullseye" image, the codename for Debian 11.) +On Linux, [Prisma needs OpenSSL](https://www.prisma.io/docs/reference/system-requirements#linux-runtime-dependencies), so we install it here via Ubuntu's package manager APT. +Python and its dependencies are there ready to be uncommented if you need them. See the [Troubleshooting](#python) section for more information. + +[It's recommended](https://docs.docker.com/develop/develop-images/instructions/#apt-get) to combine `apt-get update` and `apt-get install -y` in the same `RUN` statement for cache busting. +After installing, we clean up the apt cache to keep the layer lean. (Running `apt-get clean` isn't required—[official Debian images do it automatically](https://github.com/moby/moby/blob/03e2923e42446dbb830c654d0eec323a0b4ef02a/contrib/mkimage/debootstrap#L82-L105).) + +```Dockerfile +USER node +``` + +This and subsequent `chown` options in `COPY` instructions are for security. +[Services that can run without privileges should](https://docs.docker.com/develop/develop-images/instructions/#user). +The Node.js image includes a user, `node`, created with an explicit `uid` and `gid` (`1000`). +We reuse it. + +```Dockerfile +WORKDIR /home/node/app + +COPY --chown=node:node .yarnrc.yml . +COPY --chown=node:node package.json . +COPY --chown=node:node api/package.json api/ +COPY --chown=node:node web/package.json web/ +COPY --chown=node:node yarn.lock . +``` + +Here we copy the minimum set of files that the `yarn install` step needs. +The order isn't completely arbitrary—it tries to maximize [Docker's layer caching](https://docs.docker.com/build/cache/). +We expect `yarn.lock` to change more than the `package.json`s and the `package.json`s to change more than `.yarnrc.yml`. +That said, it's hard to argue that these files couldn't be arranged differently, or that the `COPY` instructions couldn't be combined. +The important thing is that they're all here, before the `yarn install` step: + +```Dockerfile +RUN mkdir -p /home/node/.yarn/berry/index +RUN mkdir -p /home/node/.cache + +RUN --mount=type=cache,target=/home/node/.yarn/berry/cache,uid=1000 \ + --mount=type=cache,target=/home/node/.cache,uid=1000 \ + CI=1 yarn install +``` + +This step installs all your project's dependencies—production and dev. 
+Since we use multi-stage builds, your production images won't pay for the dev dependencies installed in this step. +The build stages need the dev dependencies. + +The `mkdir` steps are a workaround for a permission error. We're working on removing them, but for now if you remove them the install step will probably fail. + +This step is a bit more involved than the others. +It uses a [cache mount](https://docs.docker.com/build/cache/#use-your-package-manager-wisely). +Yarn operates in three steps: resolution, fetch, and link. +If you're not careful, the cache for the fetch step basically doubles the number of `node_modules` installed on disk. +We could disable it all together, but by using a cache mount, we can still get the benefits without paying twice. +We set it to the default directory here, but you can change its location in `.yarnrc.yml`. +If you've done so you'll have to change it here too. + +One more thing to note: without setting `CI=1`, depending on the deploy provider, yarn may think it's in a TTY, making the logs difficult to read. With this set, yarn adapts accordingly. +Enabling CI enables [immutable installs](https://v3.yarnpkg.com/configuration/yarnrc#enableImmutableInstalls) and [inline builds](https://v3.yarnpkg.com/configuration/yarnrc#enableInlineBuilds), both of which are highly recommended. + +```Dockerfile +COPY --chown=node:node redwood.toml . +COPY --chown=node:node graphql.config.js . +COPY --chown=node:node .env.defaults .env.defaults +``` + +We'll need these config files for the build and production stages. +The `redwood.toml` file is Redwood's de-facto config file. +Both the build and serve stages read it to enable and configure functionality. + +:::warning `.env.defaults` is ok to include but `.env` is not + +If you add a secret to the Dockerfile, it can be excavated. +While it's technically true that multi stage builds add a sort of security layer, it's not a best practice. +Leave them out and look to your deploy provider for further configuration. + +::: + +### The `api_build` stage + +The `api_build` stage builds the api side: + +```Dockerfile +FROM base as api_build + +# If your api side build relies on build-time environment variables, +# specify them here as ARGs. +# +# ARG MY_BUILD_TIME_ENV_VAR + +COPY --chown=node:node api api +RUN yarn rw build api +``` + +After the work we did in the base stage, building the api side amounts to copying in the api directory and running `yarn rw build api`. + +### The `api_serve` stage + +The `api_serve` stage serves your GraphQL api and functions: + +```Dockerfile +FROM node:18-bookworm-slim as api_serve + +RUN corepack enable + +RUN apt-get update && apt-get install -y \ + openssl \ + # python3 make gcc \ + && rm -rf /var/lib/apt/lists/* +``` + +We don't start from the `base` stage, but begin anew with the `node:18-bookworm-slim` image. +Since this is a production stage, it's important for it to be as small as possible. +Docker's [multi-stage builds](https://docs.docker.com/build/building/multi-stage/) enables this. + +```Dockerfile +USER node +WORKDIR /home/node/app + +COPY --chown=node:node .yarnrc.yml .yarnrc.yml +COPY --chown=node:node package.json . +COPY --chown=node:node api/package.json api/ +COPY --chown=node:node yarn.lock yarn.lock +``` + +Like other `COPY` instructions, ordering these files with care enables layering caching. 
+ +```Dockerfile +RUN mkdir -p /home/node/.yarn/berry/index +RUN mkdir -p /home/node/.cache + +RUN --mount=type=cache,target=/home/node/.yarn/berry/cache,uid=1000 \ + --mount=type=cache,target=/home/node/.cache,uid=1000 \ + CI=1 yarn workspaces focus api --production +``` + +This is a critical step for image size. +We don't use the regular `yarn install` command. +Using the [official workspaces plugin](https://github.com/yarnpkg/berry/tree/master/packages/plugin-workspace-tools)—which is included by default in yarn v4—we "focus" on the api workspace, only installing its production dependencies. + +The cache mount will be populated at this point from the install in the `base` stage, so the fetch step should fly by. + +```Dockerfile +COPY --chown=node:node redwood.toml . +COPY --chown=node:node graphql.config.js . +COPY --chown=node:node .env.defaults .env.defaults + +COPY --chown=node:node --from=api_build /home/node/app/api/dist /home/node/app/api/dist +COPY --chown=node:node --from=api_build /home/node/app/api/db /home/node/app/api/db +COPY --chown=node:node --from=api_build /home/node/app/node_modules/.prisma /home/node/app/node_modules/.prisma +``` + +Here's where we really take advantage of multi-stage builds by copying from the `api_build` stage. +At this point all the building has been done. Now we can just grab the artifacts without having to lug around the dev dependencies. + +There's one more thing that was built: the prisma client in `node_modules/.prisma`. +We need to grab it too. + +```Dockerfile +ENV NODE_ENV=production + +CMD [ "node_modules/.bin/rw-server", "api", "--load-env-files" ] +``` + +Lastly, the default command is to start the api server using the bin from the `@redwoodjs/api-server` package. +You can override this command if you have more specific needs. + +Note that the Redwood CLI isn't available anymore. (It's a dev dependency.) +To access the server bin, we have to find its path in `node_modules`. +Though this is somewhat discouraged in modern yarn, since we're using the `node-modules` node linker, it's in `node_modules/.bin`. + +### The `web_build` stage + +This `web_build` builds the web side: + +```Dockerfile +FROM base as web_build + +COPY --chown=node:node web web +RUN yarn rw build web --no-prerender +``` + +After the work we did in the base stage, building the web side amounts to copying in the web directory and running `yarn rw build web`. + +This stage is a bit of a simplification. +It foregoes Redwood's prerendering (SSG) capability. +Prerendering is a little trickier; see [the `web_prerender_build` stage](#the-web_prerender_build-stage). + +If you've included environment variables in your `redwood.toml`'s `web.includeEnvironmentVariables` field, you'll want to specify them as ARGs here. +The setup command should've inlined them for you. + +### The `web_prerender_build` stage + +The `web_prerender_build` stage builds the web side with prerender. + +```Dockerfile +FROM api_build as web_build_with_prerender + +COPY --chown=node:node web web +RUN yarn rw build web +``` + +Building the web side with prerendering poses a challenge. +Prerender needs the api side around to get data for your Cells and route hooks. +The key line here is the first one—this stage uses the `api_build` stage as its base image. + +### The `web_serve` stage + +```Dockerfile +FROM node:18-bookworm-slim as web_serve + +RUN corepack enable + +USER node +WORKDIR /home/node/app + +COPY --chown=node:node .yarnrc.yml . +COPY --chown=node:node package.json . 
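+# Along with .yarnrc.yml and the root package.json above, these are the only manifests the
+# focused web-side install below needs; the ordering mirrors the api_serve stage to maximize layer caching.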
+COPY --chown=node:node web/package.json web/ +COPY --chown=node:node yarn.lock . + +RUN mkdir -p /home/node/.yarn/berry/index +RUN mkdir -p /home/node/.cache + +RUN --mount=type=cache,target=/home/node/.yarn/berry/cache,uid=1000 \ + --mount=type=cache,target=/home/node/.cache,uid=1000 \ + CI=1 yarn workspaces focus web --production + +COPY --chown=node:node redwood.toml . +COPY --chown=node:node graphql.config.js . +COPY --chown=node:node .env.defaults .env.defaults + +COPY --chown=node:node --from=web_build /home/node/app/web/dist /home/node/app/web/dist + +ENV NODE_ENV=production \ + API_HOST=http://api:8911 + +CMD "node_modules/.bin/rw-web-server" "--apiHost" "$API_HOST" +``` + +Most of this stage is similar to the `api_serve` stage, except that we're copying from the `web_build` stage instead of the `api_build`. +(If you're prerendering, you'll want to change the `--from=web_build` to `--from=web_prerender_build`.) + +The binary we're using here to serve the web side is `rw-web-server` which comes from the `@redwoodjs/web-server` package. +While this web server will be much more fully featured in the future, right now it's mostly just to get you going. +Ideally you want to put a web server like Nginx or Caddy in front of it. + +Lastly, note that we use the shell form of `CMD` here for its variable expansion. + +### The `console` stage + +The `console` stage is an optional stage for debugging: + +```Dockerfile +FROM base as console + +# To add more packages: +# +# ``` +# USER root +# +# RUN apt-get update && apt-get install -y \ +# curl +# +# USER node +# ``` + +COPY --chown=node:node api api +COPY --chown=node:node web web +COPY --chown=node:node scripts scripts +``` + +The console stage completes the base stage by copying in the rest of your Redwood app. +But then it pretty much leaves you to your own devices. +The intended way to use it is to create an ephemeral container by starting a shell like `/bin/bash` in the image built by targeting this stage: + +```bash +# Build the console image: +docker build . -t console --target console +# Start an ephemeral container from it: +docker run --rm -it console /bin/bash +``` + +As the comment says, feel free to add more packages. +We intentionally kept them to a minimum in the base stage, but you shouldn't worry about the size of the image here. + +## Troubleshooting + +### Python + +We tried to make the Dockerfile as lean as possible. +In some cases, that means we excluded a dependency your project needs. +And by far the most common is Python. + +During a stage's `yarn install` step (`RUN ... yarn install`), if you see an error like the following: + +``` +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python Python is not set from command line or npm configuration +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python Python is not set from environment variable PYTHON +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python checking if "python3" can be used +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - executable path is "" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - "" could not be run +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python checking if "python" can be used +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - executable path is "" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - "" could not be run +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! 
find Python +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python ********************************************************** +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python You need to install the latest version of Python. +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python Node-gyp should be able to find and use Python. If not, +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python you can try one of the following options: +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - Use the switch --python="/path/to/pythonexecutable" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python (accepted by both node-gyp and npm) +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - Set the environment variable PYTHON +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - Set the npm configuration variable python: +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python npm config set python "/path/to/pythonexecutable" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python For more information consult the documentation at: +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python https://github.com/nodejs/node-gyp#installation +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python ********************************************************** +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python +``` + +It's because your project depends on Python and the image doesn't provide it. + +It's easy to fix: just add `python3` and its dependencies (usually `make` and `gcc`): + +```diff + FROM node:18-bookworm-slim as base + + RUN apt-get update && apt-get install -y \ + openssl \ ++ python3 make gcc \ + && rm -rf /var/lib/apt/lists/* +``` + +Not sure why your project depends on Python? `yarn why` is your friend. +From the error message, we know `bufferutil` couldn't build. +But why do we have `bufferutil`? + +``` +yarn why bufferutil +└─ websocket@npm:1.0.34 + └─ bufferutil@npm:4.0.8 (via npm:^4.0.1) +``` + +`websocket` needs `bufferutil`. But why do we have `websocket`? +Keep pulling the thread till you get to a top-level dependency: + +``` +yarn why websocket +└─ @supabase/realtime-js@npm:2.8.4 + └─ websocket@npm:1.0.34 (via npm:^1.0.34) + +yarn why @supabase/realtime-js +└─ @supabase/supabase-js@npm:2.38.4 + └─ @supabase/realtime-js@npm:2.8.4 (via npm:^2.8.4) + +yarn why @supabase/supabase-js +├─ api@workspace:api +│ └─ @supabase/supabase-js@npm:2.38.4 (via npm:^2.21.0) +│ +└─ web@workspace:web + └─ @supabase/supabase-js@npm:2.38.4 (via npm:^2.21.0) +``` + +In this case, it looks like it's ultimately because of our auth provider, `@supabase/supabase-js`. 
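+
+Once you've added the missing packages to the Dockerfile, you don't have to rebuild every stage to confirm the fix: `yarn install` runs in the `base` stage, so you can build just that far. A quick check, run from your project root:
+
+```bash
+# Build only up to the base stage; if the install completes, the native deps compiled
+docker build . --target base
+```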
diff --git a/docs/versioned_docs/version-6.0/environment-variables.md b/docs/versioned_docs/version-6.x/environment-variables.md similarity index 100% rename from docs/versioned_docs/version-6.0/environment-variables.md rename to docs/versioned_docs/version-6.x/environment-variables.md diff --git a/docs/versioned_docs/version-6.0/forms.md b/docs/versioned_docs/version-6.x/forms.md similarity index 100% rename from docs/versioned_docs/version-6.0/forms.md rename to docs/versioned_docs/version-6.x/forms.md diff --git a/docs/versioned_docs/version-6.0/graphql.md b/docs/versioned_docs/version-6.x/graphql.md similarity index 98% rename from docs/versioned_docs/version-6.0/graphql.md rename to docs/versioned_docs/version-6.x/graphql.md index ff6e55044f86..b8afad901199 100644 --- a/docs/versioned_docs/version-6.0/graphql.md +++ b/docs/versioned_docs/version-6.x/graphql.md @@ -999,7 +999,7 @@ export const handler = createGraphQLHandler({ }) ``` -> Note: Check-out the [in-depth look at Redwood Directives](directives.md) that explains how to generate directives so you may use them to validate access and transform the response. +> Note: Check-out the [in-depth look at Redwood Directives](./directives.md) that explains how to generate directives so you may use them to validate access and transform the response. ### Logging Setup @@ -1014,9 +1014,9 @@ Logging is essential in production apps to be alerted about critical errors and We want to make logging simple when using RedwoodJS and therefore have configured the api-side GraphQL handler to log common information about your queries and mutations. Log statements also be optionally enriched with [operation names](https://graphql.org/learn/queries/#operation-name), user agents, request ids, and performance timings to give you more visibility into your GraphQL api. -By configuring the GraphQL handler to use your api side [RedwoodJS logger](logger.md), any errors and other log statements about the [GraphQL execution](https://graphql.org/learn/execution/) will be logged to the [destination](logger.md#destination-aka-where-to-log) you've set up: to standard output, file, or transport stream. +By configuring the GraphQL handler to use your api side [RedwoodJS logger](./logger.md), any errors and other log statements about the [GraphQL execution](https://graphql.org/learn/execution/) will be logged to the [destination](./logger.md#destination-aka-where-to-log) you've set up: to standard output, file, or transport stream. -You configure the logger using the `loggerConfig` that accepts a [`logger`](logger.md) and a set of [GraphQL Logger Options](#graphql-logger-options). +You configure the logger using the `loggerConfig` that accepts a [`logger`](./logger.md) and a set of [GraphQL Logger Options](#graphql-logger-options). ### Configure the GraphQL Logger @@ -1147,9 +1147,9 @@ export const post = async ({ id }) => { //... ``` -The GraphQL handler will then take care of logging your query and data -- as long as your logger is setup to log at the `info` [level](logger.md#log-level) and above. +The GraphQL handler will then take care of logging your query and data -- as long as your logger is setup to log at the `info` [level](./logger.md#log-level) and above. 
-> You can also disable the statements in production by just logging at the `warn` [level](logger.md#log-level) or above +> You can also disable the statements in production by just logging at the `warn` [level](./logger.md#log-level) or above This means that you can keep your services free of logger statements, but still see what's happening! @@ -1184,7 +1184,7 @@ Stream to third-party log and application monitoring services vital to productio Everyone has heard of reports that Company X logged emails, or passwords to files or systems that may not have been secured. While RedwoodJS logging won't necessarily prevent that, it does provide you with the mechanism to ensure that won't happen. -To redact sensitive information, you can supply paths to keys that hold sensitive data using the RedwoodJS logger [redact option](logger.md#redaction). +To redact sensitive information, you can supply paths to keys that hold sensitive data using the RedwoodJS logger [redact option](./logger.md#redaction). Because this logger is used with the GraphQL handler, it will respect any redaction paths setup. @@ -1291,7 +1291,7 @@ By default, your GraphQL endpoint is open to the world. That means anyone can request any query and invoke any Mutation. Whatever types and fields are defined in your SDL is data that anyone can access. -Redwood [encourages being secure by default](http://localhost:3000/docs/canary/directives#secure-by-default-with-built-in-directives) by defaulting all queries and mutations to have the `@requireAuth` directive when generating SDL or a service. +Redwood [encourages being secure by default](./directives.md#secure-by-default-with-built-in-directives) by defaulting all queries and mutations to have the `@requireAuth` directive when generating SDL or a service. When your app builds and your server starts up, Redwood checks that **all** queries and mutations have `@requireAuth`, `@skipAuth` or a custom directive applied. @@ -1417,7 +1417,7 @@ The `@requireAuth` directive lets you define roles that are permitted to perform ```ts type Mutation { createPost(input: CreatePostInput!): Post! @requireAuth(roles: ['AUTHOR', 'EDITOR']) - updatePost(id: Int!, input: UpdatePostInput!): Post! @@requireAuth(roles: ['EDITOR'] + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth(roles: ['EDITOR'] deletePost(id: Int!): Post! @requireAuth(roles: ['ADMIN'] } ``` @@ -1472,7 +1472,7 @@ export const handler = createGraphQLHandler({ }) ``` -:::caution +:::warning Enabling introspection in production may pose a security risk, as it allows users to access information about your schema, queries, and mutations. Use this option with caution and make sure to secure your GraphQL API properly. 
diff --git a/docs/versioned_docs/version-6.0/how-to/background-worker.md b/docs/versioned_docs/version-6.x/how-to/background-worker.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/background-worker.md rename to docs/versioned_docs/version-6.x/how-to/background-worker.md diff --git a/docs/versioned_docs/version-6.0/how-to/build-dashboards-fast-with-tremor.md b/docs/versioned_docs/version-6.x/how-to/build-dashboards-fast-with-tremor.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/build-dashboards-fast-with-tremor.md rename to docs/versioned_docs/version-6.x/how-to/build-dashboards-fast-with-tremor.md diff --git a/docs/versioned_docs/version-6.0/how-to/custom-function.md b/docs/versioned_docs/version-6.x/how-to/custom-function.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/custom-function.md rename to docs/versioned_docs/version-6.x/how-to/custom-function.md diff --git a/docs/versioned_docs/version-6.0/how-to/dbauth-passwordless.md b/docs/versioned_docs/version-6.x/how-to/dbauth-passwordless.md similarity index 99% rename from docs/versioned_docs/version-6.0/how-to/dbauth-passwordless.md rename to docs/versioned_docs/version-6.x/how-to/dbauth-passwordless.md index d938e62e20cb..e07e0bcd4c05 100644 --- a/docs/versioned_docs/version-6.0/how-to/dbauth-passwordless.md +++ b/docs/versioned_docs/version-6.x/how-to/dbauth-passwordless.md @@ -637,4 +637,4 @@ const Routes = () => { ``` ## You did it! -Now that you did you can rest easy. You're authentication relies on just your database but also, if some bad actor got access to it the only user data you have is really the email address. +Now that you did you can rest easy. Your authentication relies on just your database but also, if some bad actor got access to it the only user data you have is really the email address. diff --git a/docs/versioned_docs/version-6.0/how-to/disable-api-database.md b/docs/versioned_docs/version-6.x/how-to/disable-api-database.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/disable-api-database.md rename to docs/versioned_docs/version-6.x/how-to/disable-api-database.md diff --git a/docs/versioned_docs/version-6.0/how-to/file-uploads.md b/docs/versioned_docs/version-6.x/how-to/file-uploads.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/file-uploads.md rename to docs/versioned_docs/version-6.x/how-to/file-uploads.md diff --git a/docs/versioned_docs/version-6.0/how-to/gotrue-auth.md b/docs/versioned_docs/version-6.x/how-to/gotrue-auth.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/gotrue-auth.md rename to docs/versioned_docs/version-6.x/how-to/gotrue-auth.md diff --git a/docs/versioned_docs/version-6.0/how-to/mocking-graphql-in-storybook.md b/docs/versioned_docs/version-6.x/how-to/mocking-graphql-in-storybook.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/mocking-graphql-in-storybook.md rename to docs/versioned_docs/version-6.x/how-to/mocking-graphql-in-storybook.md diff --git a/docs/versioned_docs/version-6.x/how-to/oauth.md b/docs/versioned_docs/version-6.x/how-to/oauth.md new file mode 100644 index 000000000000..fb7449472c6c --- /dev/null +++ b/docs/versioned_docs/version-6.x/how-to/oauth.md @@ -0,0 +1,831 @@ +# OAuth + +If you're using an auth provider like [Auth0](/docs/auth/auth0), OAuth login to third party services (GitHub, Google, Facebook) is usually just a setting you can toggle on in your provider's dashboard. 
But if you're using [dbAuth](/docs/auth/dbauth) you'll only have username/password login to start. But, adding one or more OAuth clients isn't hard. This recipe will walk you through it from scratch, adding OAuth login via GitHub. + +## Prerequisites + +This article assumes you have an app set up and are using dbAuth. We're going to make use of the dbAuth system to validate that you're who you say you are. If you just want to try this code out in a sandbox app, you can create a test blog app from scratch by checking out the [Redwood codebase](https://github.com/redwoodjs/redwood) itself and then running a couple of commands: + +```bash +yarn install +yarn build + +# typescript +yarn run build:test-project ~/oauth-app + +# javascript +yarn run build:test-project ~/oauth-app --javascript +``` + +That will create a simple blog application at `~/oauth-app`. You'll get a login and signup page, which we're going to enhance to include a **Login with GitHub** button. + +Speaking of GitHub, you'll also need a GitHub account so you can create an [OAuth app](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/creating-an-oauth-app). + +We also assume you're familiar with the basics of OAuth and the terminology surrounding it. + +## Login Flow + +Here's the logic flow we're going to implement: + +1. User comes to the login page and clicks a **Login with GitHub** button/link. +2. The link directs the browser to GitHub's OAuth process at github.com. +3. The user logs in with their GitHub credentials and approves our app. +4. The browser is redirected back to our app, to a new function `/api/src/functions/oauth/oauth.js`. +5. The function fetches the OAuth **access_token** with a call to GitHub, using the **code** that was included with the redirect from GitHub in the previous step. +6. When the **access_token** is received, the function then requests the user data from GitHub via another fetch to GitHub's API. +7. The function then checks our database for a user identified by GitHub's `id`. If no user is found, the `User` record is created using the info from the fetch in the previous step. +8. The user data from our own database is used to create the same cookie that dbAuth creates on a successful login. +9. The browser is redirected back to our site, and the user is now logged in. + +## GitHub OAuth App Setup + +In order to allow OAuth login with GitHub, we need to create an OAuth App. The instructions below are for creating one on your personal GitHub account, but if your app lives in a separate organization then you can perform the same steps under the org instead. + +First go to your [Settings](https://github.com/settings/profile) and then the [Developer settings](https://github.com/settings/apps) at the bottom left. Finally, click the [OAuth Apps](https://github.com/settings/developers) nav item at left: + +![OAuth app settings screenshot](https://user-images.githubusercontent.com/300/245297416-34821cb6-ace0-4a6a-9bf6-4e434d3cefc5.png) + +Click [**New OAuth App**](https://github.com/settings/applications/new) and fill it in something like this: + +![New OAuth app settings](https://user-images.githubusercontent.com/300/245298106-b35a6abe-6e8c-4ab1-8ab5-7b7e1dcc0a39.png) + +The important part is the **Authorization callback URL** which is where GitHub will redirect you back once authenticated (step 4 of the login flow above). This callback URL assumes you're using the default function location of `/.redwood/functions`. If you've changed that in your app be sure to change it here as well. 
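+
+For reference, with the default dev settings (web port `8910` and `apiUrl = "/.redwood/functions"`), the callback URL you enter here would be:
+
+```
+http://localhost:8910/.redwood/functions/oauth/callback
+```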
+ +Click **Register application** and then on the screen that follows, click the **Generate a new client secret** button: + +![New client secret button](https://user-images.githubusercontent.com/300/245298639-6e08a201-b0db-4df6-975f-592544bdced7.png) + +You may be asked to use your 2FA code to verify that you're who you say you are, but eventually you should see your new **Client secret**. Copy that, and the **Client ID** above it: + +![Client secret](https://user-images.githubusercontent.com/300/245298897-129b5d00-3bed-4d7e-a40e-f4c9cda8a21f.png) + +Add those to your app's `.env` file (or wherever you're managing your secrets). Note that it's best to have a different OAuth app on GitHub for each environment you deploy to. Consider this one the **dev** app, and you'll create a separate one with a different client ID and secret when you're ready to deploy to production: + +```bash title="/.env" +GITHUB_OAUTH_CLIENT_ID=41a08ae238b5aee4121d +GITHUB_OAUTH_CLIENT_SECRET=92e8662e9c562aca8356d45562911542d89450e1 +``` + +We also need to denote what data we want permission to read from GitHub once someone authorizes our app. We'll want the user's public info, and probably their email address. That's only two scopes, and we can add those as another ENV var: + +```bash title="/.env" +GITHUB_OAUTH_CLIENT_ID=41a08ae238b5aee4121d +GITHUB_OAUTH_CLIENT_SECRET=92e8662e9c562aca8356d45562911542d89450e1 +# highlight-next-line +GITHUB_OAUTH_SCOPES="read:user user:email" +``` + +If you wanted access to more GitHub data, you can specify [additional scopes](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/scopes-for-oauth-apps) here and they'll be listed to the user when they go to authorize your app. You can also change this list in the future, but you'll need to log the user out and the next time they click **Login with GitHub** they'll be asked to authorize your app again, with a new list of requested scopes. + +One more ENV var, this is the same callback URL we told GitHub about. This is used in the link in the **Login with GitHub** button and gives GitHub another chance to verify that you're who you say you are: you're proving that you know where you're supposed to redirect back to: + +```bash title="/.env" +GITHUB_OAUTH_CLIENT_ID=41a08ae238b5aee4121d +GITHUB_OAUTH_CLIENT_SECRET=92e8662e9c562aca8356d45562911542d89450e1 +GITHUB_OAUTH_SCOPES="read:user user:email" +# highlight-next-line +GITHUB_OAUTH_REDIRECT_URI="http://localhost:8910/.redwood/functions/oauth/callback" +``` + +## The Login Button + +This part is pretty easy, we're just going to add a link/button to go directly to GitHub to begin the OAuth process: + +```jsx title="/web/src/pages/LoginPage/LoginPage.jsx" + + Login with GitHub + +``` + +:::info +This example uses Tailwind to style the link to match the rest of the default dbAuth login page +::: + +You can put this same link on your signup page as well, since using the OAuth flow will be dual-purpose: it will log the user in if a local user already exists, or it will create the user and then log them in. + +We're using several of our new ENV vars here, and need to tell Redwood to make them available to the web side during the build process. 
Add them to the `includeEnvironmentVariables` key in `redwood.toml`: + +```toml title="/redwood.toml" +[web] + title = "Redwood App" + port = "${WEB_DEV_PORT:8910}" + apiUrl = "/.redwood/functions" + # highlight-next-line + includeEnvironmentVariables = ["GITHUB_OAUTH_CLIENT_ID", "GITHUB_OAUTH_REDIRECT_URI", "GITHUB_OAUTH_SCOPES"] +[api] + port = "${API_DEV_PORT:8911}" +[browser] + open = true +[notifications] + versionUpdates = ["latest"] +``` + +Restart your dev server to pick up the new TOML settings, and your link should appear: + +![Login button](https://user-images.githubusercontent.com/300/245899085-0b946a14-cd7c-402a-9d86-b6527fd89c7f.png) + +Go ahead and click it, and you should be taken to GitHub to authorize your GitHub login to work with your app. You'll see the scopes we requested listed under the **Personal User Data** heading: + +![GitHub Oauth Access Page](https://user-images.githubusercontent.com/300/245899872-8ddd7e69-dbfa-4544-ab6f-78fd4ff02da8.png) + +:::warning + +If you get an error here that says "The redirect_uri MUST match the registered callback URL for this application" verify that the redirect URL you entered on GitHub and the one you put into the `GITHUB_OAUTH_REDIRECT_URL` ENV var are identical! + +::: + +Click **authorize** and you should end up seeing some JSON, and an error: + +![/oauth function not found](https://user-images.githubusercontent.com/300/245900327-b21a178e-5539-4c6d-a5d6-9bb736100940.png) + +That's coming from our app because we haven't created the `oauth` function that GitHub redirects to. But you'll see a `code` in the URL, which means GitHub is happy with our flow so far. Now we need to trade that `code` for an `access_token`. We'll do that in our `/oauth` function. + +:::info +This nicely formatted JSON comes from the [JSON Viewer](https://chrome.google.com/webstore/detail/json-viewer/gbmdgpbipfallnflgajpaliibnhdgobh) Chrome extension. +::: + +## The `/oauth` Function + +We can have Redwood generate a shell of our new function for us: + +```bash +yarn rw g function oauth +``` + +That will create the function at `/api/src/functions/oauth/oauth.js`. If we retry the **Login with GitHub** button now, we'll see the output of that function instead of the error: + +![Oauth function responding](https://user-images.githubusercontent.com/300/245903068-760596fa-4139-4d11-b3b3-a90edfbbf496.png) + +Now let's start filling out this function with the code we need to get the `access_token`. + +### Fetching the `access_token` + +We told GitHub to redirect to `/oauth/callback` which *appears* like it would be a subdirectory, or child route of our `oauth` function, but in reality everything after `/oauth` just gets shoved into an `event.path` variable that we'll need to inspect to make sure it has the proper parts (like `/callback`). We can do that in the `hander()`: + +```js title="/api/src/functions/oauth/oauth.js" +export const handler = async (event, _context) => { + switch (event.path) { + case '/oauth/callback': + return await callback(event) + default: + // Whatever this is, it's not correct, so return "Not Found" + return { + statusCode: 404, + } + } +} + +const callback = async (event) => { + return { body: 'ok' } +} +``` + +The `callback()` function is where we'll actually define the rest of our flow. We can verify this is working by trying a couple of different URLs in the browser and see that `/oauth/callback` returns a 200 and "ok" in the body of the page, but anything else returns a 404. 
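+
+If you want to check this routing without going through GitHub each time, you can hit the function directly while the dev server is running. A quick sketch using curl, assuming the default function location:
+
+```bash
+# Should return a 200 with "ok" in the body
+curl -i http://localhost:8910/.redwood/functions/oauth/callback
+
+# Any other path under /oauth should return a 404
+curl -i http://localhost:8910/.redwood/functions/oauth/does-not-exist
+```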
+ +Now we need to make a request to GitHub to trade the `code` for an `access_token`. This is handled by a `fetch`: + +```js title="/api/src/functions/oauth/oauth.js" +const callback = async (event) => { + // highlight-start + const { code } = event.queryStringParameters + + const response = await fetch(`https://github.com/login/oauth/access_token`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + client_id: process.env.GITHUB_OAUTH_CLIENT_ID, + client_secret: process.env.GITHUB_OAUTH_CLIENT_SECRET, + redirect_uri: process.env.GITHUB_OAUTH_REDIRECT_URI, + code, + }), + }) + + const { access_token, scope, error } = JSON.parse(await response.text()) + + if (error) { + return { statuscode: 400, body: error } + } + + return { + body: JSON.stringify({ access_token, scope, error }) + } + // highlight-end +} +``` + +First we get the `code` out of the query string variables, then make a POST `fetch()` to GitHub, setting the required JSON body to include several of the ENV vars we've set, as well as the `code` we got from the GitHub redirect. Then we parse the response JSON and just return it in the browser to make sure it worked. If something went wrong (`error` is not `undefined`) then we'll output the error message in the body of the page. + +Let's try it: go back to the login page, click the **Login with GitHub** button and see what happens: + +![GitHub OAuth access_token granted](https://user-images.githubusercontent.com/300/245906529-d08f9d6e-4947-4d14-9377-def3645d9c68.png) + +You can also verify that the error response works by, for example, removing the `code` key from the `fetch()`, and see GitHub complain: + +![GitHub OAuth error](https://user-images.githubusercontent.com/300/245906827-703a4a21-b279-428c-be1c-b73c559a72b3.png) + +Great, GitHub has authorized us and now we can get details about the actual user from GitHub. + +### Retrieving GitHub User Details + +We need some unique identifier to tie a user in GitHub to a user in our database. The `access_token` we retrieved allows us to make requests to GitHub's API and return data for the user that the `access_token` is attached to, up to the limits of the `scopes` we requested. GitHub has a unique user `id` that we can use to tie the two together. Let's request that data and dump it to the browser so we can see that the request works. + +To keep things straight in our heads, let's call our local user `user` and the GitHub user the `providerUser` (since GitHub is "providing" the OAuth credentials). 
+ +Let's make the API call to GitHub's user info endpoint and dump the result to the browser: + +```js title="/api/src/functions/oauth/oauth.js" +const callback = async (event) => { + const { code } = event.queryStringParameters + + const response = await fetch(`https://github.com/login/oauth/access_token`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + client_id: process.env.GITHUB_OAUTH_CLIENT_ID, + client_secret: process.env.GITHUB_OAUTH_CLIENT_SECRET, + redirect_uri: process.env.GITHUB_OAUTH_REDIRECT_URI, + code, + }), + }) + + const { access_token, scope, error } = JSON.parse(await response.text()) + + if (error) { + return { statuscode: 400, body: error } + } + + // highlight-start + try { + const providerUser = await getProviderUser(access_token) + return { + body: JSON.stringify(providerUser) + } + } catch (e) { + return { statuscode: 500, body: e.message } + } + // highlight-end +} + +// highlight-start +const getProviderUser = async (token) => { + const response = await fetch('https://api.github.com/user', { + headers: { Authorization: `Bearer ${token}` }, + }) + const body = JSON.parse(await response.text()) + + return body +} +// highlight-end +``` + +If all went well you should get a ton of juicy data: + +![GitHub user output](https://user-images.githubusercontent.com/300/245909925-c984eeb4-f172-46f6-8102-297b72e26bbd.png) + +If something went wrong with the fetch you should get a 500 and the error message output in the body. Try setting the `token` in the `Authorization` header to something like `foobar` to verify: + +![GitHub API error](https://user-images.githubusercontent.com/300/245910198-2975e90e-9af1-49b1-a41a-81b9269ff71d.png) + +Great, we've got the user data, now what do we do with it? + +### Database Updates + +We've got a bunch of user data that we can use to create a `User` in our own database. But we'll want to look up that same user in the future when they log back in. We have a couple of ways we can go about doing this: + +1. Keep our `User` model as-is and create the user in our local database. When the user logs in again, look them by their email address stored in GitHub. **Cons:** If the user changes their email in GitHub we won't be able to find them the next time they log in, and we would create a new user. +2. Keep the `User` model as-is but create the user with the same `id` as the one we get from GitHub. **Cons:** If we keep username/password login, we would need to create new users with an `id` that won't ever clash with the ones from GitHub. +2. Add a column to `User` like `githubId` that stores the GitHub `id` so that we can find the user again the next time they come to login. **Cons:** If we add more providers in the future we'll need to keep adding new `*Id` columns for each. +3. Create a new one-to-many relationship model that stores the GitHub `id` as a single row, tied to the `userId` of the `User` table, and a new row for each ID of any future providers. **Cons:** More complex data structure. + +Option #4 will be the most flexible going forward if we ever decide to add more OAuth providers. And if my experience is any indication, everyone always wants more login providers. + +So let's create a new `Identity` table that stores the name of the provider and the ID in that system. 
Logically it will look like this: + +``` +┌───────────┐ ┌────────────┐ +│ User │ │ Identity │ +├───────────┤ ├────────────┤ +│ id │•──┐ │ id │ +│ name │ └──<│ userId │ +│ email │ │ provider │ +│ ... │ │ uid │ +└───────────┘ │ ... │ + └────────────┘ +``` + +For now `provider` will always be `github` and the `uid` will be the GitHub's unique ID. `uid` should be a `String`, because although GitHub's ID's are integers, not every OAuth provider is guaranteed to use ints. + +#### Prisma Schema Updates + +Here's the `Identity` model definition: + +```prisma title="/api/db/schema.prisma" +model Identity { + id Int @id @default(autoincrement()) + provider String + uid String + userId Int + user User @relation(fields: [userId], references: [id]) + accessToken String? + scope String? + lastLoginAt DateTime @default(now()) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@unique([provider, uid]) + @@index(userId) +} +``` + +We're also storing the `accessToken` and `scope` that we got back from the last time we retrived them from GitHub, as well as a timestamp for the last time the user logged in. Storing the `scope` is useful because if you ever change them, you may want to notify users that have the previous scope definition to re-login so the new scopes can be authorized. + +:::caution + +There's no GraphQL SDL tied to the Identity table, so it is not accessible via our API. But, if you ever did create an SDL and service, be sure that `accessToken` is not in the list of fields exposed publicly! + +::: + +We'll need to add an `identities` relation to the `User` model, and make the previously required `hashedPassword` and `salt` fields optional (since users may want to *only* authenticate via GitHub, they'll never get to enter a password): + +```prisma title="/api/db/schema.prisma" +model User { + id Int @id @default(autoincrement()) + email String @unique + // highlight-start + hashedPassword String? + salt String? + identities Identity[] + // highlight-end + ... +} +``` + +Save these as a migration and apply them to the database: + +```bash +yarn rw prisma migrate dev +``` + +Give it a name like "create identity". That's it for the database. Let's return to the `/oauth` function and start working with our new `Identity` model. + +### Creating Users and Identities + +On a successful GitHub OAuth login we'll want to first check and see if a user already exists with the provider info. If so, we can go ahead and log them in. If not, we'll need to create it first, then log them in. + +Let's add some code that returns the user if found, otherwise it creates the user *and* returns it, so that the rest of our code doesn't have to care. + +:::info +Be sure to import `db` at the top of the file if you haven't already! 
+::: + +```js title="/api/src/functions/oauth/oauth.js" +// highlight-start +import { db } from 'src/lib/db' +import { user, createUser } from 'src/services/users' +// highlight-end + +const callback = async (event) => { + const { code } = event.queryStringParameters + + const response = await fetch(`https://github.com/login/oauth/access_token`, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + client_id: process.env.GITHUB_OAUTH_CLIENT_ID, + client_secret: process.env.GITHUB_OAUTH_CLIENT_SECRET, + redirect_uri: process.env.GITHUB_OAUTH_REDIRECT_URI, + code, + }), + }) + + const { access_token, scope, error } = JSON.parse(await response.text()) + + if (error) { + return { statuscode: 400, body: error } + } + + try { + const providerUser = await getProviderUser(access_token) + // highlight-start + const user = await getUser({ providerUser, accessToken: access_token, scope }) + return { + body: JSON.stringify(user) + } + // highlight-end + } catch (e) { + return { statuscode: 500, body: e.message } + } +} + +// highlight-start +const getUser = async ({ providerUser, accessToken, scope }) => { + const { user, identity } = await findOrCreateUser(providerUser) + + await db.identity.update({ + where: { id: identity.id }, + data: { accessToken, scope, lastLoginAt: new Date() }, + }) + + return user +} +// highlight-end + +// highlight-start +const findOrCreateUser = async (providerUser) => { + const identity = await db.identity.findFirst({ + where: { provider: 'github', uid: providerUser.id.toString() } + }) + + if (identity) { + // identity exists, return the user + const user = await user({ id: identity.userId }) + return { user, identity } + } + + // identity not found, need to create it and the user + const user = await createUser({ + input: { + email: providerUser.email, + fullName: providerUser.name, + }, + }) + + const identity = await tx.identity.create({ + data: { + userId: user.id, + provider: 'github', + uid: providerUser.id.toString() + } + }) + + return { user, identity } +} +// highlight-end +``` + +Let's break that down. + +```js +const providerUser = await getProviderUser(access_token) +// highlight-next-line +const user = await getUser({ providerUser, accessToken: access_token, scope }) +return { + body: JSON.stringify(user) +} +``` + +After getting the `providerUser` we're going to find our local `user`, and then dump the user to the browser to verify. + +```js +const getUser = async ({ providerUser, accessToken, scope }) => { + const { user, identity } = await getOrCreateUser(providerUser) + + await db.identity.update({ + where: { id: identity.id }, + data: { accessToken, scope, lastLoginAt: new Date() }, + }) + + return user +} +``` + +The `getUser()` function is going to return the user, whether it had to be created or not. Either way, the attached identity is updated with the current value for the `access_token` (note the case change, try not to get confused!), as well as the `scope` and `lastLoginAt` timestamp. `findOrCreateUser()` is going to do the heavy lifting: + +```js +const findOrCreateUser = async (providerUser) => { + const identity = await db.identity.findFirst({ + where: { provider: 'github', uid: providerUser.id.toString() } + }) + + if (identity) { + const user = await user({ id: identity.userId }) + return { user, identity } + } + + // ... 
+}
+```
+
+Note we're using the `user()` function defined in our service, reusing any business logic you may have added around looking up a user. If the user already exists, great! Return it, along with the attached `identity` so that we can update its details. If the user doesn't exist already:
+
+```js
+const findOrCreateUser = async (providerUser) => {
+  // ...
+
+  const newUser = await createUser({
+    input: {
+      email: providerUser.email,
+      fullName: providerUser.name,
+    },
+  })
+
+  const newIdentity = await db.identity.create({
+    data: {
+      userId: newUser.id,
+      provider: 'github',
+      uid: providerUser.id.toString()
+    }
+  })
+
+  return { user: newUser, identity: newIdentity }
+}
+```
+
+We create the user via the existing `createUser()` service, but create the identity directly in the database. For this particular use case we have no need to allow access to the `Identity` data via GraphQL, so there's no reason to create an SDL or underlying service. If you did make them available via GraphQL, it would make sense to replace this create with the `createIdentity()` service. Any error raised during creation would bubble up to the try/catch inside `callback()`. (The Redwood test project has a required `fullName` field that we fill with the `name` attribute from GitHub.)
+
+:::info
+Don't forget the `toString()` calls whenever we read or write the `providerUser.id`, since we made the `uid` of type `String`.
+:::
+
+If everything worked then on clicking **Login with GitHub** we should now see a dump of the actual user from our local database:
+
+![User details](https://user-images.githubusercontent.com/300/245922971-caaeb3ed-9231-4edf-aac5-9ea76b488824.png)
+
+You can take a look in the database and verify that the User and Identity were created. Start up [Prisma Studio](https://www.prisma.io/studio) (which is already included with Redwood):
+
+```bash
+yarn rw prisma studio
+```
+
+![Inspecting the Identity record](https://user-images.githubusercontent.com/300/245923393-d61233cc-52d2-4568-858e-9059dfe31bfc.png)
+
+Great! But if you go back to your homepage, you'll find that you're not actually logged in. That's because we're not setting the cookie that dbAuth expects to see to consider you logged in. Let's do that, and then our login will be complete!
+
+### Setting the Login Cookie
+
+In order to let dbAuth do the work of actually considering us logged in (and handling stuff like reauthentication and logout) we'll just set the same cookie that the username/password login system would have set if the user had logged in with a username and password.
+
+Setting a cookie in the browser is a matter of returning a `Set-Cookie` header in the response from the server. We've been responding with a dump of the user object, but now we'll do a real return, including the cookie and a `Location` header to redirect us back to the site.
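+
+In other words, the shape we're aiming for is a redirect response whose headers carry the session cookie. Here's a simplified sketch (the cookie name and value are placeholders; the real value is built with dbAuth's helpers below):
+
+```js
+// What we want `callback()` to return on a successful login (sketch only)
+return {
+  statusCode: 302,
+  headers: {
+    'Set-Cookie': 'session=<encrypted session data>; HttpOnly; Path=/; SameSite=Strict',
+    Location: '/',
+  },
+}
+```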
+
+Redwood provides the cookie encryption helper as a function that you can use in your own code, as well as the function that returns the cookie name based on what you set in your auth config:
+
+```js title="/api/src/functions/oauth/oauth.js"
+// highlight-start
+import { cookieName, encryptSession } from '@redwoodjs/auth-dbauth-api'
+import { cookieName as sessionCookieName } from 'src/lib/auth'
+// highlight-end
+
+const callback = async (event) => {
+  const { code } = event.queryStringParameters
+
+  const response = await fetch(`https://github.com/login/oauth/access_token`, {
+    method: 'POST',
+    headers: {
+      Accept: 'application/json',
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({
+      client_id: process.env.GITHUB_OAUTH_CLIENT_ID,
+      client_secret: process.env.GITHUB_OAUTH_CLIENT_SECRET,
+      redirect_uri: process.env.GITHUB_OAUTH_REDIRECT_URI,
+      code,
+    }),
+  })
+
+  const { access_token, scope, error } = JSON.parse(await response.text())
+
+  if (error) {
+    return { statusCode: 400, body: error }
+  }
+
+  try {
+    const providerUser = await getProviderUser(access_token)
+    const user = await getUser({
+      providerUser,
+      accessToken: access_token,
+      scope,
+    })
+    // highlight-start
+    const cookie = secureCookie(user)
+
+    return {
+      statusCode: 302,
+      headers: {
+        'Set-Cookie': cookie,
+        Location: '/',
+      },
+    }
+    // highlight-end
+  } catch (e) {
+    return { statusCode: 500, body: e.message }
+  }
+}
+
+// highlight-start
+const secureCookie = (user) => {
+  const expires = new Date()
+  expires.setFullYear(expires.getFullYear() + 1)
+
+  const cookieAttrs = [
+    `Expires=${expires.toUTCString()}`,
+    'HttpOnly=true',
+    'Path=/',
+    'SameSite=Strict',
+    `Secure=${process.env.NODE_ENV !== 'development'}`,
+  ]
+  const data = JSON.stringify({ id: user.id })
+  const encrypted = encryptSession(data)
+
+  return [`${cookieName(sessionCookieName)}=${encrypted}`, ...cookieAttrs].join(
+    '; '
+  )
+}
+// highlight-end
+```
+
+`secureCookie()` takes care of creating the cookie that matches the one set by dbAuth. The attributes we're setting are actually a copy of the ones set in the `authHandler` in `/api/src/functions/auth.js`, and you could remove some duplication between the two by exporting the `cookie` object from `auth.js` and then importing it and using it here. We've set the cookie to expire in a year because, let's admit it, no one likes having to log back in again.
+
+At the end of `callback()` we set the `Set-Cookie` and `Location` headers to send the browser back to the homepage of our app.
+
+Try it out, and as long as you have an indication on your site that a user is logged in, you should see it! In the case of the test project, you'll see "Log Out" at the right side of the top nav instead of "Log In". Try logging out and then back in again to test the whole flow from scratch.
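+
+For example, the de-duplication mentioned above could look something like the sketch below. It's only a sketch: the generated `auth.js` doesn't export its cookie attributes by default, so `cookieAttributes` is a name we're inventing here:
+
+```js
+// api/src/functions/auth.js (hypothetical: export the shared cookie attributes)
+export const cookieAttributes = [
+  'HttpOnly=true',
+  'Path=/',
+  'SameSite=Strict',
+  `Secure=${process.env.NODE_ENV !== 'development'}`,
+]
+
+// api/src/functions/oauth/oauth.js
+import { cookieAttributes } from 'src/functions/auth'
+
+const secureCookie = (user) => {
+  const expires = new Date()
+  expires.setFullYear(expires.getFullYear() + 1)
+
+  const encrypted = encryptSession(JSON.stringify({ id: user.id }))
+
+  // Same cookie as before, but the shared attributes now come from auth.js
+  return [
+    `${cookieName(sessionCookieName)}=${encrypted}`,
+    `Expires=${expires.toUTCString()}`,
+    ...cookieAttributes,
+  ].join('; ')
+}
+```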
+
+## The Complete `/oauth` Function
+
+Here's the `oauth` function in its entirety:
+
+```jsx title="/api/src/functions/oauth/oauth.js"
+import { cookieName, encryptSession } from '@redwoodjs/auth-dbauth-api'
+
+import { cookieName as sessionCookieName } from 'src/lib/auth'
+import { user, createUser } from 'src/services/users'
+import { db } from 'src/lib/db'
+
+export const handler = async (event, _context) => {
+  switch (event.path) {
+    case '/oauth/callback':
+      return await callback(event)
+    default:
+      // Whatever this is, it's not correct, so return "Not Found"
+      return {
+        statusCode: 404,
+      }
+  }
+}
+
+const callback = async (event) => {
+  const { code } = event.queryStringParameters
+
+  const response = await fetch(`https://github.com/login/oauth/access_token`, {
+    method: 'POST',
+    headers: {
+      Accept: 'application/json',
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({
+      client_id: process.env.GITHUB_OAUTH_CLIENT_ID,
+      client_secret: process.env.GITHUB_OAUTH_CLIENT_SECRET,
+      redirect_uri: process.env.GITHUB_OAUTH_REDIRECT_URI,
+      code,
+    }),
+  })
+
+  const { access_token, scope, error } = JSON.parse(await response.text())
+
+  if (error) {
+    return { statusCode: 400, body: error }
+  }
+
+  try {
+    const providerUser = await getProviderUser(access_token)
+    const user = await getUser({
+      providerUser,
+      accessToken: access_token,
+      scope,
+    })
+    const cookie = secureCookie(user)
+
+    return {
+      statusCode: 302,
+      headers: {
+        'Set-Cookie': cookie,
+        Location: '/',
+      },
+    }
+  } catch (e) {
+    return { statusCode: 500, body: e.message }
+  }
+}
+
+const secureCookie = (user) => {
+  const expires = new Date()
+  expires.setFullYear(expires.getFullYear() + 1)
+
+  const cookieAttrs = [
+    `Expires=${expires.toUTCString()}`,
+    'HttpOnly=true',
+    'Path=/',
+    'SameSite=Strict',
+    `Secure=${process.env.NODE_ENV !== 'development'}`,
+  ]
+  const data = JSON.stringify({ id: user.id })
+  const encrypted = encryptSession(data)
+
+  return [`${cookieName(sessionCookieName)}=${encrypted}`, ...cookieAttrs].join(
+    '; '
+  )
+}
+
+const getProviderUser = async (token) => {
+  const response = await fetch('https://api.github.com/user', {
+    headers: { Authorization: `Bearer ${token}` },
+  })
+  const body = JSON.parse(await response.text())
+
+  return body
+}
+
+const getUser = async ({ providerUser, accessToken, scope }) => {
+  const { user, identity } = await findOrCreateUser(providerUser)
+
+  await db.identity.update({
+    where: { id: identity.id },
+    data: { accessToken, scope, lastLoginAt: new Date() },
+  })
+
+  return user
+}
+
+const findOrCreateUser = async (providerUser) => {
+  const identity = await db.identity.findFirst({
+    where: { provider: 'github', uid: providerUser.id.toString() },
+  })
+
+  if (identity) {
+    // identity exists, return the user
+    return { user: await user({ id: identity.userId }), identity }
+  }
+
+  // identity not found, need to create it and the user
+  const newUser = await createUser({
+    input: {
+      email: providerUser.email,
+      fullName: providerUser.name,
+    },
+  })
+
+  const newIdentity = await db.identity.create({
+    data: {
+      userId: newUser.id,
+      provider: 'github',
+      uid: providerUser.id.toString(),
+    },
+  })
+
+  return { user: newUser, identity: newIdentity }
+}
+```
+
+## Enhancements
+
+This is a barebones implementation of a single OAuth provider. What can we do to make it better?
+
+### More Providers
+
+We hardcoded "github" as the provider in a couple of places, as well as hardcoding GitHub's API endpoint for fetching user data.
+That obviously limits this implementation to supporting only GitHub.
+
+A more flexible version could include the provider as part of the callback URL, and then our code can see that and choose which provider to set and how to get user details. Maybe the OAuth redirect is `/oauth/github/callback` and `/oauth/twitter/callback`. Then parse that out and delegate to a different function altogether, or implement each provider's specific info in separate files and `import` them into the `/oauth` function, invoking each as needed.
+
+### Changing User Details
+
+Right now we just copy the user details from GitHub right into our new User object. Maybe we want to give the user a chance to update those details first, or add additional information before saving to the database. One solution could be to create the `Identity` record but redirect to your real Signup page with the info from GitHub (and the `accessToken`), prefilling the signup fields to give the user a chance to change or enhance them, and adding the `accessToken` to a hidden field. Then when the user submits that form, if the `accessToken` is part of the form, get the user details from GitHub again (so we can get their GitHub `id`) and then create the `Identity` and `User` records as before.
+
+### Better Error Handling
+
+Right now if an error occurs in the OAuth flow, the browser just stays on the `/oauth/callback` function and sees a plain text error message. A better experience would be to redirect the user back to the login page with the error message in a query string variable, something like `http://localhost:8910/login?error=Application+not+authorized`. Then in the LoginPage, add a `useParams()` call to pull out the query variables, and show a toast message if an error is present:
+
+```jsx
+import { useEffect } from 'react'
+
+import { useParams } from '@redwoodjs/router'
+import { toast, Toaster } from '@redwoodjs/web/toast'
+
+const LoginPage = () => {
+  const params = useParams()
+
+  useEffect(() => {
+    if (params.error) {
+      toast.error(params.error)
+    }
+  }, [params])
+
+  return (
+    <>
+      <Toaster />
+
+      // ...
+    </>
+ + ) +} +``` diff --git a/docs/versioned_docs/version-6.0/how-to/pagination.md b/docs/versioned_docs/version-6.x/how-to/pagination.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/pagination.md rename to docs/versioned_docs/version-6.x/how-to/pagination.md diff --git a/docs/versioned_docs/version-6.0/how-to/role-based-access-control.md b/docs/versioned_docs/version-6.x/how-to/role-based-access-control.md similarity index 98% rename from docs/versioned_docs/version-6.0/how-to/role-based-access-control.md rename to docs/versioned_docs/version-6.x/how-to/role-based-access-control.md index dbe467f68964..da138d62d71f 100644 --- a/docs/versioned_docs/version-6.0/how-to/role-based-access-control.md +++ b/docs/versioned_docs/version-6.x/how-to/role-based-access-control.md @@ -238,10 +238,10 @@ export const getCurrentUser = async (decoded) => { #### How to Protect a Route -To protect a `Private` route for access by a single role: +To protect a `PrivateSet` route for access by a single role: ```jsx -import { Router, Route, Private } from '@redwoodjs/router' +import { Router, Route, PrivateSet } from '@redwoodjs/router' const Routes = () => { return ( @@ -254,10 +254,10 @@ const Routes = () => { } ``` -To protect a `Private` route for access by a multiple roles: +To protect a `PrivateSet` route for access by a multiple roles: ```jsx -import { Router, Route, Private } from '@redwoodjs/router' +import { Router, Route, PrivateSet } from '@redwoodjs/router' const Routes = () => { return ( diff --git a/docs/versioned_docs/version-6.0/how-to/self-hosting-redwood.md b/docs/versioned_docs/version-6.x/how-to/self-hosting-redwood.md similarity index 99% rename from docs/versioned_docs/version-6.0/how-to/self-hosting-redwood.md rename to docs/versioned_docs/version-6.x/how-to/self-hosting-redwood.md index 63a01b75e95e..870b61ef84a9 100644 --- a/docs/versioned_docs/version-6.0/how-to/self-hosting-redwood.md +++ b/docs/versioned_docs/version-6.x/how-to/self-hosting-redwood.md @@ -1,5 +1,5 @@ # Self-hosting Redwood (Serverful) -:::caution +:::warning This doc has been deprecated in favor of the [Baremetal](../deploy/baremetal.md) docs. diff --git a/docs/versioned_docs/version-6.0/how-to/sending-emails.md b/docs/versioned_docs/version-6.x/how-to/sending-emails.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/sending-emails.md rename to docs/versioned_docs/version-6.x/how-to/sending-emails.md diff --git a/docs/versioned_docs/version-6.0/how-to/supabase-auth.md b/docs/versioned_docs/version-6.x/how-to/supabase-auth.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/supabase-auth.md rename to docs/versioned_docs/version-6.x/how-to/supabase-auth.md diff --git a/docs/versioned_docs/version-6.0/how-to/test-in-github-actions.md b/docs/versioned_docs/version-6.x/how-to/test-in-github-actions.md similarity index 99% rename from docs/versioned_docs/version-6.0/how-to/test-in-github-actions.md rename to docs/versioned_docs/version-6.x/how-to/test-in-github-actions.md index dc41fbecff2f..8064e49b681f 100644 --- a/docs/versioned_docs/version-6.0/how-to/test-in-github-actions.md +++ b/docs/versioned_docs/version-6.x/how-to/test-in-github-actions.md @@ -93,7 +93,7 @@ model UserExample { Then add your connection strings to your `.env` file: -:::caution +:::warning Make sure you don't commit this file to your repo since it contains sensitive information. 
diff --git a/docs/versioned_docs/version-6.0/how-to/using-a-third-party-api.md b/docs/versioned_docs/version-6.x/how-to/using-a-third-party-api.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/using-a-third-party-api.md rename to docs/versioned_docs/version-6.x/how-to/using-a-third-party-api.md diff --git a/docs/versioned_docs/version-6.0/how-to/using-nvm.md b/docs/versioned_docs/version-6.x/how-to/using-nvm.md similarity index 99% rename from docs/versioned_docs/version-6.0/how-to/using-nvm.md rename to docs/versioned_docs/version-6.x/how-to/using-nvm.md index b7fb82ac43c7..05ed1f716bca 100644 --- a/docs/versioned_docs/version-6.0/how-to/using-nvm.md +++ b/docs/versioned_docs/version-6.x/how-to/using-nvm.md @@ -6,7 +6,7 @@ ## Installing nvm -:::caution +:::warning If you've already installed Node.js on your machine, uninstall Node.js before installing nvm. This will prevent any conflicts between the Node.js and nvm. ### If you're on a Mac @@ -77,7 +77,7 @@ To see all the versions of Node that you can install, run the following command: nvm ls-remote ``` -:::caution +:::warning You'll need to [install yarn](https://yarnpkg.com/getting-started/install) **for each version of Node that you install.** [Corepack](https://nodejs.org/dist/latest/docs/api/corepack.html) is included with all Node.js >=16.10 installs, but you must opt-in. To enable it, run the following command: diff --git a/docs/versioned_docs/version-6.0/how-to/using-yarn.md b/docs/versioned_docs/version-6.x/how-to/using-yarn.md similarity index 98% rename from docs/versioned_docs/version-6.0/how-to/using-yarn.md rename to docs/versioned_docs/version-6.x/how-to/using-yarn.md index 3c429e41a38c..5198209e6e85 100644 --- a/docs/versioned_docs/version-6.0/how-to/using-yarn.md +++ b/docs/versioned_docs/version-6.x/how-to/using-yarn.md @@ -22,7 +22,7 @@ To see the version of yarn that you have installed, run the following command: yarn --version ``` -**Redwood requires Yarn (>=1.15)** +**Redwood requires Yarn (>=1.22.21)** You can upgrade yarn by running the following command: diff --git a/docs/versioned_docs/version-6.0/how-to/windows-development-setup.md b/docs/versioned_docs/version-6.x/how-to/windows-development-setup.md similarity index 100% rename from docs/versioned_docs/version-6.0/how-to/windows-development-setup.md rename to docs/versioned_docs/version-6.x/how-to/windows-development-setup.md diff --git a/docs/versioned_docs/version-6.0/intro-to-servers.md b/docs/versioned_docs/version-6.x/intro-to-servers.md similarity index 99% rename from docs/versioned_docs/version-6.0/intro-to-servers.md rename to docs/versioned_docs/version-6.x/intro-to-servers.md index b3d05cbfb941..121097e67e6f 100644 --- a/docs/versioned_docs/version-6.0/intro-to-servers.md +++ b/docs/versioned_docs/version-6.x/intro-to-servers.md @@ -63,7 +63,7 @@ If you're connecting to cloud-based servers, turning them on and off, and potent Once you're past that prompt you'll then either be prompted for your password, or logged in automatically (when using a private or public key). Let's look at each one in detail. -:::caution Baremetal First Deploy Woes? +:::warning Baremetal First Deploy Woes? If you're having trouble deploying to your server with Baremetal, and you've never connected to your server manually via SSH, this could be why: Baremetal provides no interactive prompt to accept this server fingerprint. You need to connect manually at least once before Baremetal can connect. 
diff --git a/docs/versioned_docs/version-6.0/introduction.md b/docs/versioned_docs/version-6.x/introduction.md similarity index 91% rename from docs/versioned_docs/version-6.0/introduction.md rename to docs/versioned_docs/version-6.x/introduction.md index 560cd1fce7d0..7a89099ca8d1 100644 --- a/docs/versioned_docs/version-6.0/introduction.md +++ b/docs/versioned_docs/version-6.x/introduction.md @@ -10,8 +10,9 @@ For full inspiration and vision, see Redwood's [README](https://github.com/redwo Development on Redwood happens in the [redwoodjs/redwood repo on GitHub](https://github.com/redwoodjs/redwood). The docs are [there too](https://github.com/redwoodjs/redwood/tree/main/docs). -While Redwood's [founders and core team](https://github.com/redwoodjs/redwood#core-team) handle most of the high-priority items and the day-to-day, -Redwood wouldn't be where it is without [all its contributors](https://github.com/redwoodjs/redwood#all-contributors)! +While Redwood's [leadership and maintainers](https://github.com/redwoodjs/redwood#core-team-leadership) +handle most of the high-priority items and the day-to-day, Redwood wouldn't be +where it is without [all its contributors](https://github.com/redwoodjs/redwood#all-contributors)! Feel free to reach out to us on the [forums](https://community.redwoodjs.com) or on [Discord](https://discord.gg/redwoodjs), and follow us on [Twitter](https://twitter.com/redwoodjs) for updates. ## Getting the Most out of Redwood diff --git a/docs/versioned_docs/version-6.0/local-postgres-setup.md b/docs/versioned_docs/version-6.x/local-postgres-setup.md similarity index 100% rename from docs/versioned_docs/version-6.0/local-postgres-setup.md rename to docs/versioned_docs/version-6.x/local-postgres-setup.md diff --git a/docs/versioned_docs/version-6.0/logger.md b/docs/versioned_docs/version-6.x/logger.md similarity index 100% rename from docs/versioned_docs/version-6.0/logger.md rename to docs/versioned_docs/version-6.x/logger.md diff --git a/docs/versioned_docs/version-6.x/mailer.md b/docs/versioned_docs/version-6.x/mailer.md new file mode 100644 index 000000000000..0707e9b5bc85 --- /dev/null +++ b/docs/versioned_docs/version-6.x/mailer.md @@ -0,0 +1,289 @@ +# Mailer + +RedwoodJS offers a convenient Mailer for sending emails to your users. It's not just about sending an email; delivery matters too. The way you deliver the feature requiring email is as significant as how you prepare the mail to be delivered by the infrastructure that sends emails over the internet. + +When designing the Mailer, it was crucial that mail could be: + +* sent by popular third-party services like [Resend](), [SendGrid](), [Postmark](), [Amazon SES](), and others. +* sent by [Nodemailer]() as a self-hosted OSS solution. +* use different providers depending on the use case. For instance, some transactional emails might be sent via Resend and some digest emails sent by SES. You should be able to choose the method for a specific email. +* send safely in both development and test environments in a "sandbox" without worrying that emails might accidentally leak. +* be sent as text and/or html and composed using templates by popular tools like [React Email]() or [MJML](), with support for more methods in the future. +* unit tested to set the proper to, from, cc, subject, body, and more. +* integrated with RedwoodJS Studio to help design and preview templates. + +The RedwoodJS Mailer does more than "just send an email". It is a complete end-to-end design, development, and testing package for emails. 
+ +## Overview + +The RedwoodJS Mailer consists of [handlers](#handlers) and [renderers](#renderers), which carry out the core functionality of sending (handling) your emails and composing (rendering) your emails, respectively. This is combined with a few required files which define the necessary configuration. + +A high-level overview of the Mailer Flow is shown in the diagram below, and each case is covered in more detail below the diagram. +mailer-flow + +### Renderers + +A **renderer** transforms your React components into strings of text or HTML that can be sent as an email. + +Mailer currently offers the following renderers: +* [@redwoodjs/mailer-renderer-react-email]() based on [React Email]() +* [@redwoodjs/mailer-renderer-mjml-react]() based on [MJML]() + +You can find community-maintained renderers by searching across npm, our forums, and other community spaces. + +:::important + +Email clients are notoriously inconsistent in how they render HTML into the visual email content. Consider using a robust react library to help you write components that produce attractive emails, rendered consistently across email clients. + +::: + +### Handlers + +A **handler** is responsible for taking your rendered content and passing it on to a service that can send your email to the intended recipients, e.g., Nodemailer or Amazon SES. + +Mailer currently offers the following handlers: +* [@redwoodjs/mailer-handler-in-memory](), a simple in-memory handler typically used for testing. +* [@redwoodjs/mailer-handler-nodemailer](), which uses [Nodemailer](). +* [@redwoodjs/mailer-handler-studio](), which sends emails to the RedwoodJS Studio using nodemailer internally. +* [@redwoodjs/mailer-handler-resend](), which uses [Resend](). + +You can find community-maintained handlers by searching across npm, our forums, and other community spaces. + +### Files & Directories + +The core file for the Mailer functions is `api/src/lib/mailer.ts`. This file contains configuration defining which handlers and renderers to use and when. It starts out looking like this: +```ts title=api/src/lib/mailer.ts +import { Mailer } from '@redwoodjs/mailer-core' +import { NodemailerMailHandler } from '@redwoodjs/mailer-handler-nodemailer' +import { ReactEmailRenderer } from '@redwoodjs/mailer-renderer-react-email' + +import { logger } from 'src/lib/logger' + +export const mailer = new Mailer({ + handling: { + handlers: { + // TODO: Update this handler config or switch it out for a different handler completely + nodemailer: new NodemailerMailHandler({ + transport: { + host: 'localhost', + port: 4319, + secure: false, + }, + }), + }, + default: 'nodemailer', + }, + + rendering: { + renderers: { + reactEmail: new ReactEmailRenderer(), + }, + default: 'reactEmail', + }, + + logger, +}) +``` + +In the above, you can see how handlers and renderers are defined. Handlers are defined in the `handling` object where the keys are any name you wish to give, and the values are instances of the handler you want to use. Similarly for renderers, which are defined in the `rendering` object. Each must have a `default` provided, specifying which option to use by default in production. + +Mailer also expects you to put your mail react components inside the `api/src/mail` directory. For example, if you had a welcome email, it should be found in `api/src/mail/Welcome/Welcome.tsx`. + +## Setup + +The Mailer is not set up by default when you create a new RedwoodJS app, but it is easy to do so. 
Simply run the following CLI command: + +```bash title="RedwoodJS CLI" +yarn rw setup mailer +``` + +This command sets up the necessary files and dependencies. You can find more information on this command at [this](https://redwoodjs.com/docs/cli-commands#setup-mailer) specific section of our docs. + +## Usage + +### Example + +The best way to understand using the Mailer is with an example. + +In the tutorial, we built out a blog site. Let's say we have added a contact us functionality and the contact us form takes a name, email, and message and stores it in the database. + +For this example, suppose we want to also send an email to some internal inbox with this contact us submission. + +The service would be updated like so: + +```ts title=api/src/services/contacts.ts +import { mailer } from 'src/lib/mailer' +import { ContactUsEmail } from 'src/mail/Example/Example' + +// ... + +export const createContact: MutationResolvers['createContact'] = async ({ + input, +}) => { + const contact = await db.contact.create({ + data: input, + }) + + // Send email + await mailer.send( + ContactUsEmail({ + name: input.name, + email: input.email, + // Note the date is hardcoded here for the sake of test snapshot consistency + when: new Date(0).toLocaleString(), + }), + { + to: 'inbox@example.com', + subject: 'New Contact Us Submission', + replyTo: input.email, + from: 'contact-us@example.com', + } + ) + + return contact +} +``` + +In the code above, we do the following: + +- Import the Mailer and our mail template. +- Call the `mailer.send` function with: + - Our template, which we pass props into based on the user input. + - A set of send options to specify to, from, etc. + +In the example above, we specified a `replyTo` because that suited our business logic. However, we probably don't want to write `replyTo: 'no-reply@example.com'` in all our other emails where we might want that to be set. + +In that case, we can use the `defaults` property in our `api/src/lib/mailer.ts` config: + +```ts title=api/src/lib/mailer.ts +defaults: { + replyTo: 'no-reply@example.com', +}, +``` + +Now that we implemented our example, we might start to think about testing or how to try this out ourselves during development. + +The Mailer behaves slightly differently based on which environment you are running in. + +This helps improve your experience as you don't have to worry about sending real emails during testing or development. + +### Testing + +When your `NODE_ENV` is set to `test`, then the Mailer will start in test mode. In this mode, all mail will be sent using a test handler rather than the default production one or any specific one set when calling `send` or `sendWithoutRendering`. + +By default, when the Mailer is created, it will check if the `@redwoodjs/mailer-handler-in-memory` package is available. If it is, this will become the test handler; otherwise, the test handler will be a no-op that does nothing. The `yarn rw setup mailer` command adds this `@redwoodjs/mailer-handler-in-memory` package as a `devDependency` automatically for you. + +If you want control over this test mode behavior, you can include the following configuration in the `mailer.ts` file: + +```ts title=api/src/lib/mailer.ts +test: { + when: process.env.NODE_ENV === 'test', + handler: 'someOtherHandler', +} +``` + +The `when` property can either be a boolean or a function that returns a boolean. This decides if the Mailer starts in test mode when it is created. The `handler` property can specify a different handler to use in test mode. 
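+
+For example, `when` can be a function if you want the test handler in more situations than just `NODE_ENV === 'test'`. A sketch (here `inMemory` is assumed to be a key you've registered under `handling.handlers`):
+
+```ts title=api/src/lib/mailer.ts
+test: {
+  // Use the test handler when running tests or in CI
+  when: () => process.env.NODE_ENV === 'test' || Boolean(process.env.CI),
+  handler: 'inMemory',
+},
+```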
+ +As an example of how this helps with testing, let's work off the example we created above. Let's now test our email functionality in the corresponding test file: + +```ts title=api/src/services/contacts/contacts.test.ts +describe('contacts', () => { + scenario('creates a contact', async () => { + const result = await createContact({ + input: { name: 'String', email: 'String', message: 'String' }, + }) + + expect(result.name).toEqual('String') + expect(result.email).toEqual('String') + expect(result.message).toEqual('String') + + // Mail + const testHandler = mailer.getTestHandler() as InMemoryMailHandler + expect(testHandler.inbox.length).toBe(1) + const sentMail = testHandler.inbox[0] + expect({ + ...sentMail, + htmlContent: undefined, + textContent: undefined, + }).toMatchInlineSnapshot(` + { + "attachments": [], + "bcc": [], + "cc": [], + "from": "contact-us@example.com", + "handler": "nodemailer", + "handlerOptions": undefined, + "headers": {}, + "htmlContent": undefined, + "renderer": "reactEmail", + "rendererOptions": {}, + "replyTo": "String", + "subject": "New Contact Us Submission", + "textContent": undefined, + "to": [ + "inbox@example.com", + ], + } + `) + expect(sentMail.htmlContent).toMatchSnapshot() + expect(sentMail.textContent).toMatchSnapshot() + }) +}) +``` + +Above we tested that our service did the following: + +- Sent one email. +- All the send options (such as to, from, what handler, etc.) match a set of expected values (the inline snapshot). +- The rendered text and HTML content match the expected value (the snapshots). + +### Development + +Similar to the test mode, the Mailer also has a development mode. This mode is selected automatically when the Mailer is created if `NODE_ENV` is **not** set to `production`. This mode behaves similarly to the test mode and by default will attempt to use the `@redwoodjs/mailer-handler-studio` package if it is available. + +You can control the development mode behavior with the following configuration in the `mailer.ts` file: + +```ts title=api/src/lib/mailer.ts +development: { + when: process.env.NODE_ENV !== 'production', + handler: 'someOtherHandler', +}, +``` + +:::tip + +The Mailer studio has some helpful features when it comes to using the Mailer during development. It can provide a mail inbox so that you can send mail to your local machine and see the results. It can also provide live previews of your rendered mail templates as a guide to what they will likely look like when sent to your end users. + +::: + +### Production + +If neither the test nor development mode conditions are met, the Mailer will start in production mode. In this mode, there is no rerouting of your mail to different handlers. Instead, your mail will go directly to your default handler unless you specifically state a different one in your send options. + +### Studio + +Redwood Studio is tightly integrated with the mailer. The goal is to provide you with not just the ability to send mail but also the development tools to make your experience easier and more enjoyable. + +#### Template Previews +mailer-template-preview + +You can have a preview of what your mail templates will look like. These will rerender live as you update your template code and you can even provide a JSON payload which will be used as the props to your template component. These previews are approximate but should easily get you 90% of the way there. 
+ +#### Local Inbox +mailer-local-inbox + +When running in development mode, using the default `@redwoodjs/mailer-handler-studio` development handler, your mail will be sent to a local SMTP inbox running inside of Studio. This allows you to use your app and have full emails sent without worrying about setting up a local inbox yourself or using some other online temporary inbox service. + +:::warning + +Redwood Studio is an experimental feature and is still in development. Some of the UI shown above might look slightly different and the functionality may be tweaked over time to provide you with a better experience. + +::: + +## Need a Renderer or Handler? + +If the Mailer does not currenly provide a [handler](notion://www.notion.so/redwoodjs/133467eb46b744fd8ae60df2d493d7d0#handlers) or [renderer](notion://www.notion.so/redwoodjs/133467eb46b744fd8ae60df2d493d7d0#renderers) for the service or technology you wish to use, this doesn't prevent you from using the Mailer. Instead, you can create your own handler or renderer which you can then open source to the wider RedwoodJS community. + +To do this, read over the existing implementations for handlers [here](https://github.com/redwoodjs/redwood/tree/main/packages/mailer/handlers) and renderers [here](https://github.com/redwoodjs/redwood/tree/main/packages/mailer/renderers). You can also find the interfaces that a handler or mailer must satisfy [here](https://github.com/redwoodjs/redwood/tree/main/packages/mailer/core) in the `@redwoodjs/mailer-core` package. + +Be sure to check out the community forum for people working on similar work, to document your own creations, or to get help on anything. diff --git a/docs/versioned_docs/version-6.0/mocking-graphql-requests.md b/docs/versioned_docs/version-6.x/mocking-graphql-requests.md similarity index 100% rename from docs/versioned_docs/version-6.0/mocking-graphql-requests.md rename to docs/versioned_docs/version-6.x/mocking-graphql-requests.md diff --git a/docs/versioned_docs/version-6.0/prerender.md b/docs/versioned_docs/version-6.x/prerender.md similarity index 98% rename from docs/versioned_docs/version-6.0/prerender.md rename to docs/versioned_docs/version-6.x/prerender.md index fa467aea495c..2ac27195086f 100644 --- a/docs/versioned_docs/version-6.0/prerender.md +++ b/docs/versioned_docs/version-6.x/prerender.md @@ -11,7 +11,7 @@ We thought a lot about what the developer experience should be for route-based p :::info How's Prerendering different from SSR/SSG/SWR/ISSG/...? As Danny said in his [Prerender demo](https://www.youtube.com/watch?v=iorKyMlASZc&t=2844s) at our Community Meetup, the thing all of these have in common is that they render your markup in a Node.js context to produce HTML. The difference is when (build or runtime) and how often. -Redwood currently supports prerendering at _build_ time. So before your deploy your web side, Redwood will render your pages into HTML, and once the JavaScript has been loaded on the browser, the page becomes dynamic. +Redwood currently supports prerendering at _build_ time. So before you deploy your web side, Redwood will render your pages into HTML, and once the JavaScript has been loaded on the browser, the page becomes dynamic. ::: @@ -56,7 +56,7 @@ This will prerender your NotFoundPage to `404.html` in your dist folder. 
Note th For Private Routes, Redwood prerenders your Private Routes' `whileLoadingAuth` prop: ```jsx - + // Loading is shown while we're checking to see if the user's logged in } prerender/> diff --git a/docs/versioned_docs/version-6.0/project-configuration-dev-test-build.mdx b/docs/versioned_docs/version-6.x/project-configuration-dev-test-build.mdx similarity index 84% rename from docs/versioned_docs/version-6.0/project-configuration-dev-test-build.mdx rename to docs/versioned_docs/version-6.x/project-configuration-dev-test-build.mdx index 3b78f9378183..37df966dafeb 100644 --- a/docs/versioned_docs/version-6.0/project-configuration-dev-test-build.mdx +++ b/docs/versioned_docs/version-6.x/project-configuration-dev-test-build.mdx @@ -85,7 +85,7 @@ const config = { module.exports = config ``` -> You can always see Redwood's latest configuration templates in the [create-redwood-app package](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/template/web/jest.config.js). +> You can always see Redwood's latest configuration templates in the [create-redwood-app package](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/templates/ts/web/jest.config.js). The preset includes all the setup required to test everything that's going on in web: rendering React components and transforming JSX, automatically mocking Cells, transpiling with Babel, mocking the Router and the GraphQL client—the list goes on! You can find all the details in the [source](https://github.com/redwoodjs/redwood/blob/main/packages/testing/config/jest/web/jest-preset.js). @@ -104,40 +104,28 @@ You can find all the details in the [source](https://github.com/redwoodjs/redwoo You can customize the types that Redwood generates from your project too! This is documented in a bit more detail in the [Generated Types](typescript/generated-types#customising-codegen-config) doc. -## Debugger configuration -The `yarn rw dev` command is configured by default to launch a debugger on the port `18911`, your Redwood app also ships with default configuration to attach a debugger from VSCode. - -Simply run your dev server, then attach the debugger from the "run and debug" panel. Quick demo below: - - +## Debug configurations +### Dev Server +The `yarn rw dev` command is configured by default to open a browser and a debugger on the port `18911` and your redwood app ships with several default configurations to debug with VSCode. -
- -> **ℹ️ Tip: Can't see the "Attach debugger" configuration?** In VSCode -> -> You can grab the latest launch.json from the Redwood template [here](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/templates/ts/.vscode/launch.json). Copy the contents into your project's `.vscode/launch.json` - - -#### Customizing the debug port -You can choose to use a different debug port in one of two ways: - +#### Customizing the configuration **a) Using the redwood.toml** -Add/change the `debugPort` under your api settings +Add/change the `debugPort` or `open` under your api settings ```toml title="redwood.toml" [web] # . - # . [api] - port = 8911 + # . + // highlight-next-line + debugPort = 18911 # change me! +[browser] // highlight-next-line - debugPort = 18911 # 👈 change me! + open = true # change me! ``` -If you set it to `false`, no debug port will be exposed. The `debugPort` is only ever used during development when running `yarn rw dev` - **b) Pass a flag to `rw dev` command** You can also pass a flag when you launch your dev servers, for example: @@ -149,6 +137,21 @@ The flag passed in the CLI will always take precedence over your setting in the Just remember to also change the port you are attaching to in your `./vscode/launch.json` +### API and Web Debuggers +Simply run your dev server, then attach the debugger from the "run and debug" panel. Quick demo below: + + + +### Compound Debugger +The compound configuration is a combination of the dev, api and web configurations. +It allows you to start all debugging configurations at once, facilitating simultaneous debugging of server and client-side code. + +
+ +> **ℹ️ Tip: Can't see the debug configurations?** In VSCode +> +> You can grab the latest launch.json from the Redwood template [here](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/templates/ts/.vscode/launch.json). Copy the contents into your project's `.vscode/launch.json` + ## Ignoring the `.yarn` folder The `.yarn` folder contains the most recent Yarn executable that Redwood supports @@ -219,3 +222,17 @@ Admittedly, the `.yarn` folder won't change that often, so this may not be the best example. But we thought we'd share this technique with you so that you'd know how to apply it to any folders that you know change very often, and how to tell VSCode not to bother wasting any CPU cycles on them. + +## Trailing whitespace + +If you're using VS Code, or another editor that supports +[EditorConfig](https://editorconfig.org), trailing whitespace will be trimmed +in source files, but preserved in html, markdown and mjml files when saving. + +This behavior is controlled by `.vscode/settings` or `.editorconfig` depending +on your editor. + +In JavaScript and TypeScript files trailing whitespace has no significance, +but for html, markdown and mjml it does. That's why the behavior is different +for those files. If you don't like the default behavior Redwood has configured +for you, you're free to change the settings in those two files. diff --git a/docs/versioned_docs/version-6.0/quick-start.md b/docs/versioned_docs/version-6.x/quick-start.md similarity index 99% rename from docs/versioned_docs/version-6.0/quick-start.md rename to docs/versioned_docs/version-6.x/quick-start.md index 40f35f9134bf..3fcf885a3324 100644 --- a/docs/versioned_docs/version-6.0/quick-start.md +++ b/docs/versioned_docs/version-6.x/quick-start.md @@ -6,7 +6,7 @@ description: Redwood quick start :::info Prerequisites -- Redwood requires [Node.js](https://nodejs.org/en/) (=18.x) and [Yarn](https://yarnpkg.com/) (>=1.15) +- Redwood requires [Node.js](https://nodejs.org/en/) (=18.x) and [Yarn](https://yarnpkg.com/) (>=1.22.21) - Are you on Windows? For best results, follow our [Windows development setup](how-to/windows-development-setup.md) guide ::: diff --git a/docs/versioned_docs/version-6.x/realtime.md b/docs/versioned_docs/version-6.x/realtime.md new file mode 100644 index 000000000000..39ef6fd5a2c9 --- /dev/null +++ b/docs/versioned_docs/version-6.x/realtime.md @@ -0,0 +1,706 @@ +# Realtime + +One of the most often asked questions of RedwoodJS before and after the launch of V1 was, “When will RedwoodJS support a realtime solution?” + +The answer is: **now**. + +## What is Realtime? + +RedwoodJS's initial real-time solution leverages GraphQL and relies on a serverful deployment to maintain a long-running connection between the client and server. + +:::note +This means that your cannot currently use RedwoodJS Realtime when deployed to Netlify or Vercel. + +**More information about deploying a serverful RedwoodJS application is forthcoming.** +::: + +RedwoodJS's GraphQL Server uses [GraphQL over Server-Sent Events](https://github.com/enisdenjo/graphql-sse/blob/master/PROTOCOL.md#distinct-connections-mode) spec "distinct connections mode" for subscriptions. 
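+
+In practice that means a subscription is just an HTTP request that asks for `text/event-stream`. As a rough sketch (assuming the realtime setup described below, with the GraphQL server listening on `http://localhost:8911/graphql`), you could subscribe to the countdown example from the command line like this:
+
+```bash
+curl -N -X POST \
+  -H "accept: text/event-stream" \
+  -H "content-type: application/json" \
+  -d '{"query":"subscription { countdown(from: 5, interval: 1) }"}' \
+  http://localhost:8911/graphql
+```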
+ +Advantages of SSE over WebSockets include: + +* Transported over simple HTTP instead of a custom protocol +* Built in support for re-connection and event-id +* Simpler protocol +* No trouble with corporate firewalls doing packet inspection + +### Subscriptions and Live Queries + +In GraphQL, there are two options for real-time updates: **live queries** and **subscriptions**. + +Subscriptions are part of the GraphQL specification, whereas live queries are not. + +There are times where subscriptions are well-suited for a realtime problem — and in some cases live queries may be a better fit. Later we’ll explore the pros and cons of each approach and how best to decide that to use and when. + +### Defer and Stream + +[Stream and defer](https://the-guild.dev/graphql/yoga-server/docs/features/defer-stream) are directives that allow you to improve latency for clients by sending data the most important data as soon as it's ready. + +As applications grow, the GraphQL operation documents can get bigger. The server will only send the response back once all the data requested in the query is ready. However, not all requested data is of equal importance, and the client may not need all of the data at once. + +#### Using Defer + +The `@defer`` directive allows you to post-pone the delivery of one or more (slow) fields grouped in an inlined or spread fragment. + +#### Using Stream + +The '@stream' directive allows you to stream the individual items of a field of the list type as the items are available. + +:::info +The `@stream` directive is currently **not** supported by Apollo GraphQL client. +::: + +## Features + +RedwoodJS Realtime handles the hard parts of a GraphQL Realtime implementation by automatically: + +- allowing GraphQL Subscription operations to be handled +- merging in your subscriptions types and mapping their handler functions (subscribe, and resolve) to your GraphQL schema letting you keep your subscription logic organized and apart from services (your subscription my use a service to respond to an event) +- authenticating subscription requests using the same `@requireAuth` directives already protecting other queries and mutations (or you can implement your own validator directive) +- adding in the `@live` query directive to your GraphQL schema and setting up the `useLiveQuery` envelop plugin to handle requests, invalidation, and managing the storage mechanism needed +- creating and configuring in-memory and persisted Redis stores uses by the PubSub transport for subscriptions and Live Queries (and letting you switch between them in development and production) +- placing the pubSub transport and stores into the GraphQL context so you can use them in services, subscription resolvers, or elsewhere (like a webhook, function, or job) to publish an event or invalidate data +- typing you subscription channel event payloads +- support `@defer` and `@stream` directives + +It provides a first-class developer experience for real-time updates with GraphQL so you can easily + +- respond to an event (e.g. NewPost, NewUserNotification) +- respond to a data change (e.g. Post 123's title updated) + +and have the latest data reflected in your app. + +Lastly, the Redwood CLI has commands to + +- generate a boilerplate implementation and sample code needed to create your custom + - subscriptions + - live Queries + +Regardless of the implementation chosen, **a stateful server and store are needed** to track changes, invalidation, or who wants to be informed about the change. 
+ +### What can I build with Realtime? + +- Application Alerts and Messages +- User Notifications +- Live Charts +- Location updates +- Auction bid updates +- Messaging +- OpenAI streaming responses + +## RedwoodJS Realtime Setup + +To setup Realtime in an existing RedwoodJS project, run the following commands: + +* `yarn rw exp setup-server-file` +* `yarn rw exp setup-realtime` + +You will get: + +* `api/server.ts` where you configure your Fastify server and GraphQL +* `api/lib/realtime.ts` where you consume your subscriptions and configure realtime with an in-memory or Redis store +* Usage examples for live queries, subscriptions, defer, and stream. You'll get sdl, services/subscriptions for each. +* The [`auction` live query](#auction-live-query-example) example +* The [`countdown timer` subscription](#countdown-timer-example) example +* The [`chat` subscription](#chatnew-message-example) examples +* The [`alphabet` stream](#alphabet-stream-example) example +* The [`slow and fast` field defer](#slow-and-fast-field-defer-example) example + +:::note +There is no UI setup for these examples. You can find information on how to try them out using the GraphiQL playground. +::: + +### GraphQL Configuration + +Now that how have a serverful project, you will configure your GraphQL server in the `api/server.ts` file. + +:::important +That means you **must** manually configure your GraphQL server accordingly +::: + +For example, you will have to setup any authentication and the realtime config: + +```ts + await fastify.register(redwoodFastifyGraphQLServer, { + // If authenticating, be sure to import and add in + // authDecoder, + // getCurrentUser, + loggerConfig: { + logger: logger, + options: { + query: true, + data: true, + operationName: true, + requestId: true, + }, + }, + graphiQLEndpoint: enableWeb ? '/.redwood/functions/graphql' : '/graphql', + sdls, + services, + directives, + allowIntrospection: true, + allowGraphiQL: true, + // Configure if using RedwoodJS Realtime + realtime, + }) +``` + +You can now remove the GraphQL handler function that resides in `api/functions/graphql.ts`. + +### Realtime Configuration + +By default, RedwoodJS realtime configures an in-memory store for the Pub Sub client used with subscriptions and live query invalidation. + +Realtime supports in-memory and Redis stores: + +- In-memory stores are useful for development and testing. +- Redis stores are useful for production. + +To enable defer and streaming, set `enableDeferStream` to true. + +Configure a Redis store and defer and stream in: + +```ts +// api/lib/realtime.ts +import { RedwoodRealtimeOptions } from '@redwoodjs/realtime' + +import subscriptions from 'src/subscriptions/**/*.{js,ts}' + +// if using a Redis store +// import { Redis } from 'ioredis' +// const publishClient = new Redis() +// const subscribeClient = new Redis() + +/** + * Configure RedwoodJS Realtime + * + * See https://redwoodjs.com/docs/realtime + * + * Realtime supports Live Queries and Subscriptions over GraphQL SSE. + * + * Live Queries are GraphQL queries that are automatically re-run when the data they depend on changes. + * + * Subscriptions are GraphQL queries that are run when a client subscribes to a channel. + * + * Redwood Realtime + * - uses a publish/subscribe model to broadcast data to clients. + * - uses a store to persist Live Query and Subscription data. + * + * Redwood Realtime supports in-memory and Redis stores: + * - In-memory stores are useful for development and testing. 
+ * - Redis stores are useful for production.
+ */
+export const realtime: RedwoodRealtimeOptions = {
+  subscriptions: {
+    subscriptions,
+    store: 'in-memory',
+    // if using a Redis store
+    // store: { redis: { publishClient, subscribeClient } },
+  },
+  liveQueries: {
+    store: 'in-memory',
+    // if using a Redis store
+    // store: { redis: { publishClient, subscribeClient } },
+  },
+  // To enable defer and streaming, set to true.
+  // enableDeferStream: true,
+}
+```
+
+#### PubSub and LiveQueryStore
+
+By setting up RedwoodJS Realtime, the GraphQL server adds two helpers on the context:
+
+* pubSub
+* liveQueryStore
+
+With `context.pubSub` you can subscribe to and publish messages via `context.pubSub.publish('the-topic', id, id2)`.
+
+With `context.liveQueryStore` you can call `context.liveQueryStore.invalidate(key)`, where your key may be a reference or schema coordinate:
+
+##### Reference
+Where the query is: `auction(id: ID!): Auction @requireAuth`:
+
+* `"Auction:123"`
+
+##### Schema Coordinate
+When the query is: `auctions: [Auction!]! @requireAuth`:
+
+* `"Query.auctions"`
+
+## Subscriptions
+
+RedwoodJS has a first-class developer experience for GraphQL subscriptions.
+
+#### Subscribe to Events
+
+- Granular information on what data changed
+- Why has the data changed?
+- Spec compliant
+
+### Chat/New Message Example
+
+```graphql
+type Subscription {
+  newMessage(roomId: ID!): Message! @requireAuth
+}
+```
+
+1. I subscribed to "newMessage" in room "2"
+2. Someone added a message to room "2" with a from and body
+3. A "NewMessage" event to Room 2 gets published
+4. I find out and see who the message is from and what they messaged (the body)
+
+### Countdown Timer Example
+
+Counts down from a starting value by an interval.
+
+```graphql
+subscription CountdownFromInterval {
+  countdown(from: 100, interval: 10)
+}
+```
+
+This example showcases how a subscription can yield its own response.
+
+## Live Queries
+
+RedwoodJS has made it super easy to add live queries to your GraphQL server! You can push new data to your clients automatically once the data selected by a GraphQL operation becomes stale by annotating your query operation with the `@live` directive.
+
+The invalidation mechanism is based on GraphQL ID fields and schema coordinates. Once a query operation has been invalidated, the query is re-executed, and the result is pushed to the client.
+
+##### Listen for Data Changes
+
+- I'm not interested in what exactly changed it.
+- Just give me the data.
+- This is not part of the GraphQL specification.
+- There can be multiple root fields.
+
+### Auction Live Query Example
+
+```graphql
+query GetCurrentAuctionBids @live {
+  auction(id: "1") {
+    bids {
+      amount
+    }
+    highestBid {
+      amount
+    }
+    id
+    title
+  }
+}
+
+mutation MakeBid {
+  bid(input: { auctionId: "1", amount: 10 }) {
+    amount
+  }
+}
+```
+
+1. I listen for changes to Auction 1 by querying the auction.
+2. A bid was placed on Auction 1.
+3. The information for Auction 1 is no longer valid.
+4. My query automatically refetches the latest Auction and Bid details.
+
+## Defer Directive
+
+The `@defer` directive allows you to postpone the delivery of one or more (slow) fields grouped in an inlined or spread fragment.
+
+### Slow and Fast Field Defer Example
+
+Here, the GraphQL schema defines two queries, one for "fast" information and one for "slow" (i.e., delayed) information.
+
+```graphql
+export const schema = gql`
+  type Query {
+    """
+    A field that resolves fast.
+    """
+    fastField: String! @skipAuth
+
+    """
+    A field that resolves slowly.
+    Maybe you want to @defer this field ;)
+    """
+    slowField(waitFor: Int! = 5000): String @skipAuth
+  }
+`
+```
+
+The Redwood services for these queries return the `fastField` immediately and the `slowField` after some delay.
+
+```ts
+import { logger } from 'src/lib/logger'
+
+const wait = (time: number) =>
+  new Promise((resolve) => setTimeout(resolve, time))
+
+export const fastField = async () => {
+  return 'I am speedy'
+}
+
+export const slowField = async (_, { waitFor = 5000 }) => {
+  logger.debug('deferring slowField until ...')
+  await wait(waitFor)
+  logger.debug('now!')
+
+  return 'I am slow'
+}
+```
+
+When making the query:
+
+```graphql
+query SlowAndFastFieldWithDefer {
+  ... on Query @defer {
+    slowField
+  }
+  fastField
+}
+```
+
+The response initially returns:
+
+```json
+{
+  "data": {
+    "fastField": "I am speedy"
+  }
+}
+```
+
+and then, once the deferred field resolves, presents:
+
+```json
+{
+  "data": {
+    "fastField": "I am speedy",
+    "slowField": "I am slow"
+  }
+}
+```
+
+## Stream Directive
+
+The `@stream` directive allows you to stream the individual items of a field of the list type as the items are available.
+
+### Alphabet Stream Example
+
+Here, the GraphQL schema defines a query to return the letters of the alphabet:
+
+```graphql
+export const schema = gql`
+  type Query {
+    alphabet: [String!]! @skipAuth
+  }
+`
+```
+
+The service uses `Repeater` to write a safe stream resolver.
+
+:::info
+[AsyncGenerators](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/AsyncGenerator) as declared via the `async *` keywords are prone to memory leaks and leaking timers. For real-world usage, use Repeater.
+:::
+
+```ts
+import { Repeater } from '@redwoodjs/realtime'
+
+import { logger } from 'src/lib/logger'
+
+export const alphabet = async () => {
+  return new Repeater(async (push, stop) => {
+    const values = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+    const publish = () => {
+      const value = values.shift()
+
+      if (value) {
+        logger.debug({ value }, 'publishing')
+
+        push(value)
+      }
+
+      if (values.length === 0) {
+        stop()
+      }
+    }
+
+    const interval = setInterval(publish, 1000)
+
+    stop.then(() => {
+      logger.debug('cancel')
+      clearInterval(interval)
+    })
+
+    publish()
+  })
+}
+```
+
+### What does the incremental stream look like?
+
+Since Apollo Client does not yet support the `@stream` directive, you can try it out in the GraphiQL Playground or see it in action via curl.
+
+When making the request with the `@stream` directive:
+
+```bash
+curl -g -X POST \
+  -H "accept:multipart/mixed" \
+  -H "content-type: application/json" \
+  -d '{"query":"query StreamAlphabet { alphabet @stream }"}' \
+  http://localhost:8911/graphql
+```
+
+Here you see the initial response has `[]` for the alphabet data.
+
+Then on each push to the Repeater, an incremental update to the list of letters is sent.
+ +The stream ends when `hasNext` is false: + +```bash +* Connected to localhost (127.0.0.1) port 8911 (#0) +> POST /graphql HTTP/1.1 +> Host: localhost:8911 +> User-Agent: curl/8.1.2 +> accept:multipart/mixed +> content-type: application/json +> Content-Length: 53 +> +< HTTP/1.1 200 OK +< connection: keep-alive +< content-type: multipart/mixed; boundary="-" +< transfer-encoding: chunked +< +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 39 + +{"data":{"alphabet":[]},"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["a"],"path":["alphabet",0]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["b"],"path":["alphabet",1]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["c"],"path":["alphabet",2]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["d"],"path":["alphabet",3]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["e"],"path":["alphabet",4]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["f"],"path":["alphabet",5]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["g"],"path":["alphabet",6]}],"hasNext":true} +--- +... + +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 17 + +{"hasNext":false} +----- +``` + +## How do I choose Subscriptions or Live Queries? + +![image](https://github.com/ahaywood/redwoodjs-streaming-realtime-demos/assets/1051633/e3c51908-434c-4396-856a-8bee7329bcdd) + +When deciding on how to offer realtime data updates in your RedwoodJS app, you’ll want to consider: + +- How frequently do your users require information updates? + - Determine the value of "real-time" versus "near real-time" to your users. Do they need to know in less than 1-2 seconds, or is 10, 30, or 60 seconds acceptable for them to receive updates? + - Consider the criticality of the data update. Is it low, such as a change in shipment status, or higher, such as a change in stock price for an investment app? + - Consider the cost of maintaining connections and tracking updates across your user base. Is the infrastructure cost justifiable? + - If you don't require "real" real-time, consider polling for data updates on a reasonable interval. According to Apollo, [in most cases](https://www.apollographql.com/docs/react/data/subscriptions/), your client should not use subscriptions to stay up to date with your backend. Instead, you should poll intermittently with queries or re-execute queries on demand when a user performs a relevant action, such as clicking a button. +- How are you deploying? Serverless or Serverful? + - Real-time options depend on your deployment method. + - If you are using a serverless architecture, your application cannot maintain a stateful connection to your users' applications. Therefore, it's not easy to "push," "publish," or "stream" data updates to the web client. 
+ - In this case, you may need to look for third-party solutions that manage the infrastructure to maintain such stateful connections to your web client, such as [Supabase Realtime](https://supabase.com/realtime), [SendBird](https://sendbird.com/), [Pusher](https://pusher.com/), or consider creating your own [AWS SNS-based](https://docs.aws.amazon.com/sns/latest/dg/welcome.html) functionality. + + + +## Showcase Demos + +Please see our [showcase RedwoodJS Realtime app](https://realtime-demo.fly.dev) for exampes of subscriptions and live queries. It also demonstrates how you can handle streaming responses, like those used by OpenAI chat completions. + +### Chat Room (Subscription) + +Sends a message to one of four Chat Rooms. + +Each room subscribes to its new messages via the `NewMessage` channel aka topic. + +```ts +context.pubSub.publish('newMessage', roomId, { from, body }) +``` + +#### Simulate + +```bash +./scripts/simulate_chat.sh -h +Usage: ./scripts/simulate_chat.sh -r [roomId] -n [num_messages] + ./scripts/simulate_chat.sh -h + +Options: + -r roomId Specify the room ID (1-4) for sending chat messages. + -n num_messages Specify the number of chat messages to send. If not provided, the script will run with a random number of messages. +``` +#### Test + +```ts +/** + * To test this NewMessage subscription, run the following in one GraphQL Playground to subscribe: + * + * subscription ListenForNewMessagesInRoom { + * newMessage(roomId: "1") { + * body + * from + * } + * } + * + * + * And run the following in another GraphQL Playground to publish and send a message to the room: + * + * mutation SendMessageToRoom { + * sendMessage(input: {roomId: "1", from: "hello", body: "bob"}) { + * body + * from + * } + * } + */ + ``` + +### Auction Bids (Live Query) + +Bid on a fancy pair of new sneaks! + +When a bid is made, the auction updates via a Live Query due to the invalidation of the auction key. + +```ts + const key = `Auction:${auctionId}` + context.liveQueryStore.invalidate(key) + ``` + +#### Simulate + +```bash +./scripts/simulate_bids.sh -h +Usage: ./scripts/simulate_bids.sh [options] + +Options: + -a Specify the auction ID (1-5) for which to send bids (optional). + -n Specify the number of bids to send (optional). + -h, --help Display this help message. + ``` + +#### Test + +```ts +/** + * To test this live query, run the following in the GraphQL Playground: + * + * query GetCurrentAuctionBids @live { + * auction(id: "1") { + * bids { + * amount + * } + * highestBid { + * amount + * } + * id + * title + * } + * } + * + * And then make a bid with the following mutation: + * + * mutation MakeBid { + * bid(input: {auctionId: "1", amount: 10}) { + * amount + * } + * } + */ +``` + +### Countdown (Streaming Subscription) + +> It started slowly and I thought it was my heart +> But then I realised that this time it was for real + +Counts down from a starting values by an interval. + +This example showcases how a subscription can yields its own response. + +#### Test + +```ts +/** + * To test this Countdown subscription, run the following in the GraphQL Playground: + * + * subscription CountdownFromInterval { + * countdown(from: 100, interval: 10) + * } + */ +``` + +### Bedtime Story (Subscription with OpenAI Streaming) + +> Tell me a story about a happy, purple penguin that goes to a concert. 
+
+Showcases how to use OpenAI to stream a chat completion via a prompt that writes a bedtime story:
+
+```ts
+const PROMPT = `Write a short children's bedtime story about an Animal that is a given Color and that does a given Activity.
+
+Give the animal a cute descriptive and memorable name.
+
+The story should teach a lesson.
+
+The story should be told in a quality, style and feeling of the given Adjective.
+
+The story should be no longer than 3 paragraphs.
+
+Format the story using Markdown.`
+```
+
+The story updates on each stream content delta via a `newStory` subscription topic event.
+
+```ts
+context.pubSub.publish('newStory', id, story)
+```
+
+### Movie Mashup (Live Query with OpenAI Streaming)
+
+> It's Out of Africa meets Pretty Woman.
+
+> So it's a psychic, political, thriller comedy with a heart. With a heart, not unlike Ghost meets Manchurian Candidate.
+
+-- The Player, 1992
+
+Mashup some of your favorite movies to create something new and Netflix-worthy to watch.
+
+Powered by OpenAI, this movie tagline and treatment updates on each stream content delta via a Live Query by invalidating the `MovieMashup` key.
+
+```ts
+context.liveQueryStore.invalidate(`MovieMashup:${id}`)
+```
diff --git a/docs/versioned_docs/version-6.0/redwoodrecord.md b/docs/versioned_docs/version-6.x/redwoodrecord.md
similarity index 100%
rename from docs/versioned_docs/version-6.0/redwoodrecord.md
rename to docs/versioned_docs/version-6.x/redwoodrecord.md
diff --git a/docs/versioned_docs/version-6.0/router.md b/docs/versioned_docs/version-6.x/router.md
similarity index 94%
rename from docs/versioned_docs/version-6.0/router.md
rename to docs/versioned_docs/version-6.x/router.md
index 98d1493d9ab9..a4d48c6d0c56 100644
--- a/docs/versioned_docs/version-6.0/router.md
+++ b/docs/versioned_docs/version-6.x/router.md
@@ -44,9 +44,7 @@ The `path` prop specifies the URL path to match, starting with the beginning sla

 ## Private Routes

-Some pages should only be visible to authenticated users.
-
-We support this using private `` component. Read more [further down](#private-set).
+Some pages should only be visible to authenticated users. We support this using the `PrivateSet` component. Read more [further down](#privateset).

 ## Sets of Routes

@@ -89,7 +87,7 @@ Conceptually, this fits with how we think about Context and Layouts as things th
 There's a lot of flexibility here. You can even nest `Sets` to great effect:

 ```jsx title="Routes.js"
-import { Router, Route, Set, Private } from '@redwoodjs/router'
+import { Router, Route, Set } from '@redwoodjs/router'
 import BlogContext from 'src/contexts/BlogContext'
 import BlogLayout from 'src/layouts/BlogLayout'
 import BlogNavLayout from 'src/layouts/BlogNavLayout'
@@ -130,24 +128,11 @@ becomes...
 ```

-### `private` Set
+### `PrivateSet`

-Sets can take a `private` prop which makes all Routes inside that Set require authentication. When a user isn't authenticated and attempts to visit one of the Routes in the private Set, they'll be redirected to the Route passed as the Set's `unauthenticated` prop. The originally-requested Route's path is added to the query string as a `redirectTo` param. This lets you send the user to the page they originally requested once they're logged-in.
+A `PrivateSet` makes all Routes inside that Set require authentication. When a user isn't authenticated and attempts to visit one of the Routes in the `PrivateSet`, they'll be redirected to the Route passed as the `PrivateSet`'s `unauthenticated` prop.
The originally-requested Route's path is added to the query string as a `redirectTo` param. This lets you send the user to the page they originally requested once they're logged-in. -Here's an example of how you'd use a private set: - -```jsx title="Routes.js" - - - - - - -``` - -Private routes are important and should be easy to spot in your Routes file. The larger your Routes file gets, the more difficult it will probably become to find `` among your other Sets. So we also provide a `` component that's just an alias for ``. Most of our documentation uses ``. - -Here's the same example again, but now using `` +Here's an example of how you'd use a `PrivateSet`: ```jsx title="Routes.js" @@ -160,7 +145,7 @@ Here's the same example again, but now using `` For more fine-grained control, you can specify `roles` (which takes a string for a single role or an array of roles), and the router will check to see that the current user is authorized before giving them access to the Route. If they're not, they will be redirected to the page specified in the `unauthenticated` prop, such as a "forbidden" page. Read more about Role-based Access Control in Redwood [here](how-to/role-based-access-control.md). -To protect `PrivateSet` routes for access by a single role: +To protect private routes for access by a single role: ```jsx title="Routes.js" @@ -172,7 +157,7 @@ To protect `PrivateSet` routes for access by a single role: ``` -To protect `PrivateSet` routes for access by multiple roles: +To protect private routes for access by multiple roles: ```jsx title="Routes.js" @@ -261,7 +246,9 @@ More granular match, `page` key only and `tab=tutorial` activeMatchParams={[{ tab: 'tutorial' }, 'page' ]} ``` -You can `useMatch` to create your own component with active styles. +### useMatch + +You can use `useMatch` to create your own component with active styles. > `NavLink` uses it internally! @@ -528,7 +515,7 @@ Redwood will detect your explicit import and refrain from splitting that page in Because lazily-loaded pages can take a non-negligible amount of time to load (depending on bundle size and network connection), you may want to show a loading indicator to signal to the user that something is happening after they click a link. -In order to show a loader as your page chunks are loading, you simply add the `whileLoadingPage` prop to your route, `Set` or `Private` component. +In order to show a loader as your page chunks are loading, you simply add the `whileLoadingPage` prop to your route, `Set` or `PrivateSet` component. ```jsx title="Routes.js" import SkeletonLoader from 'src/components/SkeletonLoader' @@ -574,7 +561,7 @@ When the lazy-loaded page is loading, `PageLoadingContext.Consumer` will pass `{ Let's say you have a dashboard area on your Redwood app, which can only be accessed after logging in. When Redwood Router renders your private page, it will first fetch the user's details, and only render the page if it determines the user is indeed logged in. -In order to display a loader while auth details are being retrieved you can add the `whileLoadingAuth` prop to your private ``, `` component: +In order to display a loader while auth details are being retrieved you can add the `whileLoadingAuth` prop to your `PrivateSet` component: ```jsx //Routes.js @@ -677,7 +664,7 @@ Note that if you're copy-pasting this example, it uses [Tailwind CSS](https://ta :::note Can I customize the development one? -As it's part of the RedwoodJS framework, you can't. 
But if there's a feature you want to add, let us know on the [forums](https://community.redwoodjs.com/).
+As it's part of the RedwoodJS framework, you can't _change_ the dev fatal error page, but you can always build your own that takes the same props. If there's a feature you want to add to the built-in version, let us know on the [forums](https://community.redwoodjs.com/).

 :::
diff --git a/docs/versioned_docs/version-6.0/schema-relations.md b/docs/versioned_docs/version-6.x/schema-relations.md
similarity index 95%
rename from docs/versioned_docs/version-6.0/schema-relations.md
rename to docs/versioned_docs/version-6.x/schema-relations.md
index 8c5e65b71358..0d0b3add404b 100644
--- a/docs/versioned_docs/version-6.0/schema-relations.md
+++ b/docs/versioned_docs/version-6.x/schema-relations.md
@@ -39,7 +39,7 @@ model Tag {
 }
 ```

-These relationships can be [implicit](https://www.prisma.io/docs/concepts/components/prisma-schema/relations#implicit-many-to-many-relations) (as this diagram shows) or [explicit](https://www.prisma.io/docs/concepts/components/prisma-schema/relations#explicit-many-to-many-relations) (explained below). Redwood's SDL generator (which is also used by the scaffold generator) only supports an **explicit** many-to-many relationship when generating with the `--crud` flag. What's up with that?
+These relationships can be [implicit](https://www.prisma.io/docs/concepts/components/prisma-schema/relations/many-to-many-relations#implicit-many-to-many-relations) (as this diagram shows) or [explicit](https://www.prisma.io/docs/concepts/components/prisma-schema/relations/many-to-many-relations#explicit-many-to-many-relations) (explained below). Redwood's SDL generator (which is also used by the scaffold generator) only supports an **explicit** many-to-many relationship when generating with the `--crud` flag. What's up with that?

 ## CRUD Requires an `@id`
diff --git a/docs/versioned_docs/version-6.0/security.md b/docs/versioned_docs/version-6.x/security.md
similarity index 100%
rename from docs/versioned_docs/version-6.0/security.md
rename to docs/versioned_docs/version-6.x/security.md
diff --git a/docs/versioned_docs/version-6.x/seo-head.md b/docs/versioned_docs/version-6.x/seo-head.md
new file mode 100644
index 000000000000..7cb5772edd62
--- /dev/null
+++ b/docs/versioned_docs/version-6.x/seo-head.md
@@ -0,0 +1,356 @@
+---
+description: Use meta tags to set page info for SEO
+---
+
+# SEO & `<meta>` tags
+
+Search Engine Optimization is a dark art that some folks dedicate their entire lives to. We've added a couple of features to Redwood to make HTML-based SEO fairly simple.
+
+## Adding a Title
+
+You certainly want to change the title of your Redwood app from the default of "Redwood App." You can start by adding or modifying `title` inside of `/redwood.toml`
+
+```diff title=redwood.toml
+[web]
+- title = "Redwood App"
++ title = "My Cool App"
+  port = 8910
+  apiUrl = "/.redwood/functions"
+```
+
+This title (the app title) is used by default for all your pages if you don't define another one.
+It will also be used for the title template.
+
+### Title Template
+
+Now that you have the app title set, you probably want some consistency with the page title. That's what the title template is for.
+
+Add `titleTemplate` as a prop for `RedwoodProvider` to have a title template for every page:
+
+```diff title=web/src/App.(tsx|jsx)
+- <RedwoodProvider>
++ <RedwoodProvider titleTemplate="%PageTitle | %AppTitle">
+   /* ... */
+ </RedwoodProvider>
+```
+
+You can use whatever formatting you'd like in here. Some examples:
+
+```jsx
+"%PageTitle | %AppTitle" => "Home Page | Redwood App"
+
+"%AppTitle · %PageTitle" => "Redwood App · Home Page"
+
+"%PageTitle : %AppTitle" => "Home Page : Redwood App"
+```
+
+## Adding to Page `<head>`
+
+So you want to change the title of your page, or add elements to the `<head>` of the page? We've got you!
+
+Let's say you want to change the title of your About page. Redwood provides a built-in `<Head>` component, which you can use like this:
+
+```diff title=web/src/pages/AboutPage/AboutPage.(tsx|jsx)
++import { Head } from '@redwoodjs/web'
+
+const AboutPage = () => {
+  return (
+    <div>
+      <h2>AboutPage</h2>
++      <Head>
++        <title>About the team</title>
++      </Head>
+```
+
+You can include any valid `<head>` tag in here that you like. However, Redwood also provides a utility component [`<Metadata>`](#setting-meta-tags-and-opengraph-directives-with-metadata).
+
+:::caution `<MetaTags>` Deprecation
+
+Prior to Redwood 6.6.0 this component was called `<MetaTags>` and had several special hard-coded props like `ogContentUrl`, which didn't properly map to the OpenGraph spec. We'll still render `<MetaTags>` for the foreseeable future, but it's deprecated and you should migrate to `<Metadata>` if you have an existing app.
+
+:::
+
+### What About Nested Tags?
+
+Redwood uses [react-helmet-async](https://github.com/staylor/react-helmet-async) underneath, which will use the tags furthest down your component tree.
+
+For example, if you set a title in your Layout and a title in your Page, it'll render the one in the Page - this way you can override the tags you wish, while sharing the tags defined in the Layout.
+
+:::info Bots & `<meta>` Tags
+
+For these headers to appear to bots and scrapers, e.g. for Twitter to show your title, you have to make sure your page is prerendered. If your content is static, you can use Redwood's built-in [Prerender](prerender.md). For dynamic tags, check the [Dynamic head tags](#dynamic-tags) section.
+
+:::
+
+## Setting `<meta>` Tags and OpenGraph Directives with `<Metadata>`
+
+Often we want to set more than just the title and description of the page—most commonly [OpenGraph](https://ogp.me/) headers.
+
+Redwood provides a convenience component `<Metadata>` to help you create most of these `<meta>` tags for you with a more concise syntax. But, you can also pass children and define any custom content that you want.
+
+Here's an example setting some common meta, including a page title, description, `og:image` and an `http-equiv`:
+
+```jsx
+import { Metadata } from '@redwoodjs/web'
+
+const AboutPage = () => {
+  return (
+    <div>
+      <Metadata
+        title="About page"
+        description="About the awesome team"
+        og={{ image: 'https://example.com/images/og.png' }}
+      >
+        <meta httpEquiv="refresh" content="5; url=https://example.com/about" />
+      </Metadata>
+      <h2>About Page</h2>
+      <p>This is the about page!</p>
+    </div>
+ ) +} + +export default AboutPage +``` + +This code would be transformed into this HTML and injected into the `` tag: + +```html +About page + + + + + + + + + +``` + +Setting an `og:image` is how sites like Facebook and Slack can show a preview of a URL when pasted into a post (also known as "unfurling"): + +![Typical URL unfurl](/img/facebook_unfurl.png) + +Sites like GitHub go a step farther than a generic image by actually creating an image for a repo on the fly, including details about the repo itself: + +![GitHub's og:image for the redwood repo](https://opengraph.githubassets.com/322ce8081bb85a86397a59494eab1c0fbe942b5104461f625e2c973c46ae4179/redwoodjs/redwood) + +If you want to write your own `` tags, skipping the interpolation that `` does for you, you can pass them as children to `` or just write them into the `` tag as normal. + +### `` Props + +For the most part `` creates simple `` tags based on the structure of the props you pass in. There are a couple of special behaviors described below. + +#### Plain Key/Value Props + +Any "plain" key/value prop will be turned into a `` tag with `name` and `content` attributes: + +```jsx + +// generates + +``` + +Child elements are just copied 1:1 to the resulting output: + +```jsx + + + +// generates + + +``` + +#### Passing Objects to Props + +Any props that contain an object will create a `` tag with `property` and `content` attributes, and the `property` being the names of the nested keys with a `:` between each: + +```jsx + +// generates + +``` + +This is most commonly used to create the "nested" structure that a spec like OpenGraph uses: + +```jsx + +// generates + +``` + +You can create multiple `` tags with the same name/property (allowed by the OpenGraph spec) by using an array: + +```jsx + +// generates + + +``` + +You can combine nested objects with strings to create any structure you like: + +```jsx + +// generates + + + + + + + +``` + +#### Special OpenGraph Helpers + +If you define _any_ `og` prop, we will copy any `title` and `description` to an `og:title` and `og:description`: + +```jsx + +// generates + + +``` + +You can override this behavior by explicitly setting `og:title` or `og:description` to `null`: + +```jsx + +// generates + +``` + +Of course, if you don't want any auto-generated `og` tags, then don't include any `og` prop at all! + +In addition to `og:title` and `og:description`, if you define _any_ `og` prop we will generate an `og:type` set to `website`: + +```jsx + +// generates + +``` + +You can override the `og:type` by setting it directly: + +```jsx + +// generates + +``` + +#### Other Special Cases + +If you define a `title` prop we will automatically prepend a `` tag to the output: + +```jsx +<Metadata title="My Website" /> +// generates +<title>My Website + +``` + +If you define a `charSet` prop we will create a `` tag with the `charset` attribute: + +```jsx + +// generates + +``` + +We simplified some of the examples above by excluding the generated `` and `og:type` tags, so here's the real output if you included `title` and `og` props: + +```jsx +<Metadata title="My Website" og /> +// generates +<title>My Website + + + + +``` + +:::info Do I need to apply these same tags over and over in every page? + +Some `` tags, like `charset` or `locale` are probably applicable to the entire site, in which case it would be simpler to just include these once in your `index.html` instead of having to set them manually on each and every page/cell of your site. 
+ +::: + +This should allow you to create a fairly full-featured set of `` tags with minimal special syntax! A typical `` invocation could look like: + +```jsx + +``` + +## Dynamic tags + +Bots will pick up our tags if we've prerendered the page, but what if we want to set the `` based on the output of the Cell? + +:::info Prerendering + +As of v3.x, Redwood supports prerendering your [Cells](https://redwoodjs.com/docs/cells) with the data you were querying. For more information please refer [to this section](https://redwoodjs.com/docs/prerender#cell-prerendering). + +::: + +Let's say in our `PostCell`, we want to set the title to match the `Post`. + +```jsx +import { Metadata } from '@redwoodjs/web' + +import Post from 'src/components/Post/Post' + +export const QUERY = gql` + query FindPostById($id: Int!) { + post: post(id: $id) { + title + snippet + author { + name + } + } + } +` + +export const Loading = /* ... */ + +export const Empty = /* ... */ + +export const Success = ({ post }) => { + return ( + <> + + + + ) +} +``` + +Once the `Success` component renders, it will update your page's `` and set the relevant `<meta>` tags for you! diff --git a/docs/versioned_docs/version-6.0/serverless-functions.md b/docs/versioned_docs/version-6.x/serverless-functions.md similarity index 98% rename from docs/versioned_docs/version-6.0/serverless-functions.md rename to docs/versioned_docs/version-6.x/serverless-functions.md index fc327a39063e..c0c53fc8a846 100644 --- a/docs/versioned_docs/version-6.0/serverless-functions.md +++ b/docs/versioned_docs/version-6.x/serverless-functions.md @@ -2,10 +2,19 @@ description: Create, develop, and run serverless functions --- -# Serverless Functions +# Serverless Functions (API Endpoints) <!-- `redwood.toml`—`api/src/functions` by default. --> + +:::info + +You can think of serverless functions as API Endpoints, and in the future we'll update the terminology used. + +Originally, Redwood apps were intended to be deployed as serverless functions to AWS Lambda. Whenever a Redwood app is deployed to a "serverful" environment such as Fly or Render, a Fastify server is started and your Redwood app's functions in `api/src/functions` are automatically registered onto the server. Request adapters are also automatically configured to handle the translation between Fastify's request and reply objects to the functions' AWS Lambda signature. + +::: + Redwood looks for serverless functions in `api/src/functions`. Each function is mapped to a URI based on its filename. For example, you can find `api/src/functions/graphql.js` at `http://localhost:8911/graphql`. ## Creating Serverless Functions @@ -34,12 +43,6 @@ export const handler = async (event, context) => { } ``` -:::info - -We call them 'serverless' but they can also be used on 'serverful' hosted environments too, such as Render or Heroku. - -::: - ## The handler For a lambda function to be a lambda function, it must export a handler that returns a status code. The handler receives two arguments: `event` and `context`. Whatever it returns is the `response`, which should include a `statusCode` at the very least. 
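
For instance, a minimal sketch of such a handler (the `greet` name, query parameter, and response shape here are illustrative assumptions, not prescribed by Redwood):

```jsx
// api/src/functions/greet.js: an illustrative sketch of a handler
export const handler = async (event, _context) => {
  // Lambda-style events expose any query-string parameters on the event object
  const name = event.queryStringParameters?.name || 'world'

  return {
    statusCode: 200,
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ greeting: `Hello, ${name}` }),
  }
}
```

With the dev server running, `http://localhost:8911/greet?name=Redwood` would respond with `{"greeting":"Hello, Redwood"}`.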
diff --git a/docs/versioned_docs/version-6.0/services.md b/docs/versioned_docs/version-6.x/services.md similarity index 98% rename from docs/versioned_docs/version-6.0/services.md rename to docs/versioned_docs/version-6.x/services.md index eecf04215ded..52bb916f0a8a 100644 --- a/docs/versioned_docs/version-6.0/services.md +++ b/docs/versioned_docs/version-6.x/services.md @@ -618,7 +618,7 @@ validate(input.value, 'Value', { ``` ### validateWithSync() -`validateWith()` is simply given a function to execute. This function should throw with a message if there is a problem, otherwise do nothing. +`validateWithSync()` is simply given a function to execute. This function should throw with a message if there is a problem, otherwise do nothing. ```jsx validateWithSync(() => { @@ -636,14 +636,14 @@ validateWithSync(() => { Either of these errors will be caught and re-thrown as a `ServiceValidationError` with your text as the `message` of the error (although technically you should always throw errors with `new Error()` like in the second example). -You could just write your own function and throw whatever you like, without using `validateWith()`. But, when accessing your Service function through GraphQL, that error would be swallowed and the user would simply see "Something went wrong" for security reasons: error messages could reveal source code or other sensitive information so most are hidden. Errors thrown by Service Validations are considered "safe" and allowed to be shown to the client. +You could just write your own function and throw whatever you like, without using `validateWithSync()`. But, when accessing your Service function through GraphQL, that error would be swallowed and the user would simply see "Something went wrong" for security reasons: error messages could reveal source code or other sensitive information so most are hidden. Errors thrown by Service Validations are considered "safe" and allowed to be shown to the client. -### validateWithSync() +### validateWith() -The same behavior as `validateWithSync()` but works with Promises. +The same behavior as `validateWithSync()` but works with Promises. Remember to `await` the validation. ```jsx -validateWithSync(async () => { +await validateWith(async () => { if (await db.products.count() >= 100) { throw "There can only be a maximum of 100 products in your store" } @@ -1114,7 +1114,7 @@ const updateUser = async ({ id, input }) => { }) ``` -:::caution +:::warning When explicitly deleting cache keys like this you could find yourself going down a rabbit hole. What if there is another service somewhere that also updates user? Or another service that updates an organization, as well as all of its underlying child users at the same time? You'll need to be sure to call `deleteCacheKey()` in these places as well. As a general guideline, it's better to come up with a cache key that encapsulates any triggers for when the data has changed (like the `updatedAt` timestamp, which will change no matter who updates the user, anywhere in your codebase). 
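
For example, here is a sketch of that approach, assuming the `cache` helper that Redwood's service caching setup exposes from `src/lib/cache` and a hypothetical `post` model related to the user:

```jsx
import { cache } from 'src/lib/cache'
import { db } from 'src/lib/db'

// Sketch: the key embeds updatedAt, so any change to the user produces a brand
// new key and stale entries are simply never read again, with no explicit
// deleteCacheKey() bookkeeping required.
export const cachedUserPosts = async ({ id }) => {
  const user = await db.user.findUnique({ where: { id } })

  return cache(`user-posts-${user.id}-${user.updatedAt.getTime()}`, () =>
    db.post.findMany({ where: { userId: user.id } })
  )
}
```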
diff --git a/docs/versioned_docs/version-6.0/storybook.md b/docs/versioned_docs/version-6.x/storybook.md similarity index 100% rename from docs/versioned_docs/version-6.0/storybook.md rename to docs/versioned_docs/version-6.x/storybook.md diff --git a/docs/versioned_docs/version-6.0/testing.md b/docs/versioned_docs/version-6.x/testing.md similarity index 98% rename from docs/versioned_docs/version-6.0/testing.md rename to docs/versioned_docs/version-6.x/testing.md index 426038a6b85a..13bebbcec89d 100644 --- a/docs/versioned_docs/version-6.0/testing.md +++ b/docs/versioned_docs/version-6.x/testing.md @@ -274,6 +274,7 @@ render(<Article article={ title: 'Foobar' } />, { }) ``` ::: + ### Mocking useLocation To mock `useLocation` in your component tests, wrap the component with `LocationProvider`: @@ -288,6 +289,22 @@ render( ) ``` +### Mocking useParams + +To mock `useParams` in your component tests, wrap the component with `ParamsProvider`: + +```jsx +import { ParamsProvider } from '@redwoodjs/router'; + +render( + <ParamsProvider allParams={{ param1: 'val1', param2: 'val2' }}> + <Component /> + </ParamsProvider> +) +``` + +The `allParams` argument accepts an object that will provide parameters as you expect them from the query parameters of a URL string. In the above example, we are assuming the URL looks like `/?param1=val1¶m2=val2`. + ### Queries In most cases you will want to exclude the design elements and structure of your components from your test. Then you're free to redesign the component all you want without also having to make the same changes to your test suite. Let's look at some of the functions that React Testing Library provides (they call them "[queries](https://testing-library.com/docs/queries/about/)") that let you check for *parts* of the rendered component, rather than a full string match. @@ -1131,9 +1148,9 @@ export default NameForm Now, we can extend the `test` file which Redwood generated. We're going to want to: -1) Import `waitFor` from the `@redwoodjs/testing/web` library. -2) Add an import to `@testing-library/user-event` for its `default`. -3) Provide an `onSubmit` prop to our "renders successfully" test. +1. Import `waitFor` from the `@redwoodjs/testing/web` library. +2. Add an import to `@testing-library/user-event` for its `default`. +3. Provide an `onSubmit` prop to our "renders successfully" test. ```jsx title="NameForm.test.js" import { render, screen, waitFor } from '@redwoodjs/testing/web' @@ -1154,9 +1171,9 @@ describe('NameForm', () => { Finally, we'll create three simple tests which ensure our form works as expected. -1) Does our component NOT submit when required fields are empty? -2) Does our component submit when required fields are populated? -3) Does our component submit, passing our (submit) handler the data we entered? +1. Does our component NOT submit when required fields are empty? +2. Does our component submit when required fields are populated? +3. Does our component submit, passing our (submit) handler the data we entered? The important takeaways are: @@ -1261,7 +1278,7 @@ Does anyone else find it confusing that the software itself is called a "databas When you start your test suite you may notice some output from Prisma talking about migrating the database. Redwood will automatically run `yarn rw prisma db push` against your test database to make sure it's up-to-date. -:::caution What if I have custom migration SQL? +:::warning What if I have custom migration SQL? 
The `prisma db push` command only restores a snapshot of the current database schema (so that it runs as fast as possible). **It does not actually run migrations in sequence.** This can cause a [problem](https://github.com/redwoodjs/redwood/issues/5818) if you have certain database configuration that *must* occur as a result of the SQL statements inside the migration files. @@ -1958,6 +1975,16 @@ console.log(testCacheClient.storage) This is mainly helpful when you are testing for a very specific value, or have edgecases in how the serialization/deserialization works in the cache. +## Testing Mailer + +If your project uses [RedwoodJS Mailer](./mailer.md) to send emails, you can [also write tests](./mailer.md#testing) to make sure that email: + +* is sent to an sandbox inbox +* renders properly +* sets the expected to, from, cc, bcc, subject attributes based on the email sending logic +* checks that the html and text content is set correctly + +Since these tests send mail to a sandbox inbox, you can be confident that no emails accidentally get sent into the wild as part of your test or CI runs. ## Wrapping Up diff --git a/docs/versioned_docs/version-6.x/toast-notifications.md b/docs/versioned_docs/version-6.x/toast-notifications.md new file mode 100644 index 000000000000..9b22ee524bd4 --- /dev/null +++ b/docs/versioned_docs/version-6.x/toast-notifications.md @@ -0,0 +1,133 @@ +--- +description: Toast notifications with react-hot-toast +--- + +# Toast Notifications + +Did you know that those little popup notifications that you sometimes see at the top of a page after you've performed an action are affectionately known as "toast" notifications? +Because they pop up like a piece of toast from a toaster! + +![Example toast animation](https://user-images.githubusercontent.com/300/110032806-71024680-7ced-11eb-8d69-7f462929815e.gif) + +Redwood supports these notifications out of the box thanks to the [react-hot-toast](https://react-hot-toast.com/) package. +We'll refer you to their [docs](https://react-hot-toast.com/docs) since they're very thorough, but here's enough to get you going. + +### Add the `Toaster` Component + +To render toast notifications, start by adding the `Toaster` component. +It's usually better to add it at the App or Layout-level than the Page: + +```jsx title="web/src/layouts/MainLayout/MainLayout.js" +// highlight-next-line +import { Toaster } from '@redwoodjs/web/toast' + +const MainLayout = ({ children }) => { + return ( + <> + // highlight-next-line + <Toaster /> + <main>{children}</main> + </> + ) +} + +export default MainLayout +``` + +### Call the `toast` function + +To render a basic toast notification with default styles, call the `toast` function: + +```jsx title="web/src/layouts/MainLayout/MainLayout.js" +import { toast } from '@redwoodjs/web/toast' + +// ... + +const PostForm = () => { + const [create, { loading, error }] = useMutation(CREATE_POST_MUTATION) + + const onSubmit = async (data) => { + try { + await create({ variables: { input: data }}) + // highlight-next-line + toast('Post created') + } + catch (e) { + // highlight-next-line + toast('Error creating post') + } + } + + return ( + // <Form onSubmit={onSubmit}> ... </Form> + ) +}) + +export default PostForm +``` + +### Call the `toast` variants + +To render a toast notification with default icons and default styles, call the `toast` variants: + +```jsx title="web/src/components/PostForm/PostForm.js" +import { toast } from '@redwoodjs/web/toast' + +// ... 
+ +const PostForm = () => { + const [create, { loading, error }] = useMutation(CREATE_POST_MUTATION, { + onCompleted: () => { + // highlight-next-line + toast.success('Post created') + } + onError: () => { + // highlight-next-line + toast.error('Error creating post') + } + }) + + const onSubmit = (data) => { + create({ variables: { input: data }}) + } + + return ( + // <Form onSubmit={onSubmit}> ... </Form> + ) +}) + +export default PostForm +``` + +or render an async toast by calling the `toast.promise` function: + +```jsx title="web/src/components/PostForm/PostForm.js" +import { toast } from '@redwoodjs/web/toast' + +// ... + +const PostForm = () => { + const [create, { loading, error }] = useMutation(CREATE_POST_MUTATION) + + const onSubmit = (data) => { + // highlight-next-line + toast.promise(create({ variables: { input: data }}), { + loading: 'Creating post...', + success: 'Post created', + error: 'Error creating post', + }) + } + + return ( + // <Form onSubmit={onSubmit}> ... </Form> + ) +}) + +export default PostForm +``` + +:::warning + +You can't use the [onError](https://www.apollographql.com/docs/react/api/react/hooks/#onerror) callback in combination with the `toast.promise` function. + +::: diff --git a/docs/versioned_docs/version-6.0/tutorial/afterword.md b/docs/versioned_docs/version-6.x/tutorial/afterword.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/afterword.md rename to docs/versioned_docs/version-6.x/tutorial/afterword.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter0/what-is-redwood.md b/docs/versioned_docs/version-6.x/tutorial/chapter0/what-is-redwood.md similarity index 91% rename from docs/versioned_docs/version-6.0/tutorial/chapter0/what-is-redwood.md rename to docs/versioned_docs/version-6.x/tutorial/chapter0/what-is-redwood.md index 652f36148400..e779cafefdf2 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter0/what-is-redwood.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter0/what-is-redwood.md @@ -28,10 +28,10 @@ You can start them both with a single command: `yarn redwood dev` ### The Router -When you open your web app in a browser, React does its thing initializing your app and monitoring the history for changes so that new content can be shown. Redwood features a custom, declaritive Router that lets you specify URLs and the requisite pages (just a React component) will be shown. A simple routes file may look something like: +When you open your web app in a browser, React does its thing initializing your app and monitoring the history for changes so that new content can be shown. Redwood features a custom, declarative Router that lets you specify URLs and the requisite pages (just a React component) will be shown. 
A simple routes file may look something like: ```jsx -import { Set, Router, Route } from '@redwoodjs/router' +import { Route, Router, Set, PrivateSet } from '@redwoodjs/router' import ApplicationLayout from 'src/layouts/ApplicationLayout' import { useAuth } from './auth' @@ -41,10 +41,10 @@ const Routes = () => { <Set wrap={ApplicationLayout}> <Route path="/login" page={LoginPage} name="login" /> <Route path="/signup" page={SignupPage} name="signup" /> - <Private unauthenticated="login"> + <PrivateSet unauthenticated="login"> <Route path="/dashboard" page={DashboardPage} name="dashboard" /> <Route path="/products/{sku}" page={ProductsPage} name="products" /> - </Private> + </PrivateSet> </Set> <Route path="/" page={HomePage} name="home" /> @@ -54,7 +54,7 @@ const Routes = () => { } ``` -You can probably get a sense of how all of this works without ever having seen a Redwood route before! Some routes can be marked as `<Private>` and will not be accessible without being logged in. Others can be wrapped in a "layout" (again, just a React component) to provide common styling shared between pages in your app. +You can probably get a sense of how all of this works without ever having seen a Redwood route before! Some routes can be marked as `<PrivateSet>` and will not be accessible without being logged in. Others can be wrapped in a "layout" (again, just a React component) to provide common styling shared between pages in your app. #### Prerender @@ -66,7 +66,7 @@ This is Redwood's version of static site generation, aka SSG. ### Authentication -The `<Private>` route limits access to users that are authenticated, but how do they authenticate? Redwood includes integrations to many popular third party authentication hosts (including [Auth0](https://auth0.com/), [Supabase](https://supabase.com/docs/guides/auth) and [Clerk](https://clerk.com/)). You can also [host your own auth](https://redwoodjs.com/docs/auth/dbauth), or write your own [custom authentication](https://redwoodjs.com/docs/auth/custom) option. If going self-hosted, we include login, signup, and reset password pages, as well as the option to include TouchID/FaceID and third party biometric readers! +The `<PrivateSet>` route limits access to users that are authenticated, but how do they authenticate? Redwood includes integrations to many popular third party authentication hosts (including [Auth0](https://auth0.com/), [Supabase](https://supabase.com/docs/guides/auth) and [Clerk](https://clerk.com/)). You can also [host your own auth](https://redwoodjs.com/docs/auth/dbauth), or write your own [custom authentication](https://redwoodjs.com/docs/auth/custom) option. If going self-hosted, we include login, signup, and reset password pages, as well as the option to include TouchID/FaceID and third party biometric readers! Once authenticated, how do you know what a user is allowed to do or not do? Redwood includes helpers for [role-based access control](https://redwoodjs.com/docs/how-to/role-based-access-control-rbac) that integrates on both the front- and backend. @@ -99,7 +99,7 @@ Going back to our testimonals hypothetical, a cell to fetch and display them may ```js export const QUERY = gql` query GetTestimonials { - testimonals { + testimonials { id author quote @@ -122,11 +122,11 @@ export const Success = ({ testimonials }) => { } ``` -(In this case we don't export `Empty` so that if there aren't any, that section of the final page won't render anything, not even indicating to the user that something is missing.) 
+(In this case we don't export `Empty` so that if there aren't any testimonials, that section of the final page won't render anything, not even indicating to the user that something is missing.) If you ever create additional clients for your server (a mobile app, perhaps) you'll be giving yourself a huge advantage by using GraphQL from the start. -Oh, and prerendering also works with cells! At build time, Redwood will start up the GraphQL server and make requests, just as if a user was access the pages, rendering the result to plain HTML, ready to be loaded instantly by the browser. +Oh, and prerendering also works with cells! At build time, Redwood will start up the GraphQL server and make requests, just as if a user was accessing the pages, rendering the result to plain HTML, ready to be loaded instantly by the browser. ### Apollo Cache @@ -170,7 +170,7 @@ model Testimonial { } ``` -Prisma has a couple command line tools that take changes to this file and turn them into [SQL DDL commands](https://www.sqlshack.com/sql-ddl-getting-started-with-sql-ddl-commands-in-sql-server/) which are executed against your database to update its structure to match. +Prisma has a couple of command line tools that take changes to this file and turn them into [SQL DDL commands](https://www.sqlshack.com/sql-ddl-getting-started-with-sql-ddl-commands-in-sql-server/) which are executed against your database to update its structure to match. #### GraphQL @@ -234,7 +234,7 @@ export const schema = gql` ` ``` -The `testimonials` query is marked with the [GraphQL directive](../../directives.md) `@skipAuth` meaning that requests here should *not* be limited to authenticated users. However, the critical `createTestimonail` and `deleteTestimonial` mutations are marked `@requireAuth`, and so can only be called by a logged in user. +The `testimonials` query is marked with the [GraphQL directive](../../directives.md) `@skipAuth` meaning that requests here should *not* be limited to authenticated users. However, the critical `createTestimonial` and `deleteTestimonial` mutations are marked `@requireAuth`, and so can only be called by a logged in user. Redwood's backend GraphQL server is powered by [GraphQL Yoga](https://the-guild.dev/graphql/yoga-server) and so you have access to everything that makes Yoga secure and performant: rate and depth limiting, logging, directives, and a ton more. @@ -275,7 +275,7 @@ There's even an interactive console that lets you, for example, execute Prisma q ## Jest -Being able to develop a full-stack application this easily is great, but how do you verify that it's working as intended? That's where a great test suite comes in. [Jest](https://jestjs.io/) is a test framework that, as they say, focuses on simplicty. We felt that it was a natural fit with Redwood, and so most files you can generate will include the related test file automatically (pre-filled with some tests, even!). +Being able to develop a full-stack application this easily is great, but how do you verify that it's working as intended? That's where a great test suite comes in. [Jest](https://jestjs.io/) is a test framework that, as they say, focuses on simplicity. We felt that it was a natural fit with Redwood, and so most files you can generate will include the related test file automatically (pre-filled with some tests, even!). Redwood includes several Jest helpers and matchers, allowing you to mock out GraphQL requests, database data, logged in users, and more. 
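
For example, a component test might lean on those helpers to stub both the GraphQL layer and the current user. This is only a sketch: the cell import path and fields echo the hypothetical testimonials example above rather than real project files.

```jsx
import { render, screen } from '@redwoodjs/testing/web'

import TestimonialsCell from 'src/components/TestimonialsCell'

describe('TestimonialsCell', () => {
  it('renders mocked testimonials for a logged-in admin', async () => {
    // Intercept the cell's GetTestimonials query and return canned data
    mockGraphQLQuery('GetTestimonials', () => ({
      testimonials: [{ id: 1, author: 'Alice', quote: 'Redwood is great!' }],
    }))

    // Simulate a logged-in user for any auth-aware rendering
    mockCurrentUser({ id: 1, roles: ['admin'] })

    render(<TestimonialsCell />)

    expect(await screen.findByText('Redwood is great!')).toBeInTheDocument()
  })
})
```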
diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter1/file-structure.md b/docs/versioned_docs/version-6.x/tutorial/chapter1/file-structure.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter1/file-structure.md rename to docs/versioned_docs/version-6.x/tutorial/chapter1/file-structure.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter1/first-page.md b/docs/versioned_docs/version-6.x/tutorial/chapter1/first-page.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter1/first-page.md rename to docs/versioned_docs/version-6.x/tutorial/chapter1/first-page.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter1/installation.md b/docs/versioned_docs/version-6.x/tutorial/chapter1/installation.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter1/installation.md rename to docs/versioned_docs/version-6.x/tutorial/chapter1/installation.md index 26df59ca3c32..266664f74687 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter1/installation.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter1/installation.md @@ -24,6 +24,7 @@ You'll have a new directory `redwoodblog` containing several directories and fil ```bash cd redwoodblog +yarn install yarn redwood dev ``` diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter1/layouts.md b/docs/versioned_docs/version-6.x/tutorial/chapter1/layouts.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter1/layouts.md rename to docs/versioned_docs/version-6.x/tutorial/chapter1/layouts.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter1/prerequisites.md b/docs/versioned_docs/version-6.x/tutorial/chapter1/prerequisites.md similarity index 89% rename from docs/versioned_docs/version-6.0/tutorial/chapter1/prerequisites.md rename to docs/versioned_docs/version-6.x/tutorial/chapter1/prerequisites.md index 4013773b7d2c..094a84230861 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter1/prerequisites.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter1/prerequisites.md @@ -31,7 +31,7 @@ If you have an existing site created with a prior version, you'll need to upgrad During installation, RedwoodJS checks if your system meets version requirements for Node and Yarn: - node: "=18.x" -- yarn: ">=1.15" +- yarn: ">=1.22.21" If you're using a version of Node or Yarn that's **less** than what's required, _the installation bootstrap will result in an ERROR_. To check, please run the following from your terminal command line: @@ -46,14 +46,21 @@ Please do upgrade accordingly. Then proceed to the Redwood installation when you There are many ways to install and manage both Node.js and Yarn. If you're installing for the first time, we recommend the following: -**1. Yarn** -We recommend following the [instructions via Yarnpkg.com](https://yarnpkg.com/getting-started/install). - -**2. Node.js** +**1. Node.js** Using the recommended [LTS version from Nodejs.org](https://nodejs.org/en/) is preferred. - `nvm` is a great tool for managing multiple versions of Node on one system. It takes a bit more effort to set up and learn, however. Follow the [nvm installation instructions](https://github.com/nvm-sh/nvm#installing-and-updating). (Windows users should go to [nvm-windows](https://github.com/coreybutler/nvm-windows/releases)). For **Mac** users with Homebrew installed, you can alternatively use it to [install `nvm`](https://formulae.brew.sh/formula/nvm). 
Or, refer to our how to guide [using nvm](../../how-to/using-nvm.md). +**2. Yarn** +As of Node.js v18+, Node.js ships with a CLI tool called [Corepack](https://nodejs.org/docs/latest-v18.x/api/corepack.html) to manage package managers. All you have to do is enable it, then you'll have Yarn: + +``` +corepack enable +yarn -v +``` + +The version of Yarn will probably be `1.22.21`, but don't worry—in your Redwood project, Corepack will know to use a modern version of Yarn because of the `packageManager` field in the root `package.json`. + **Windows:** Recommended Development Setup - JavaScript development on Windows has specific requirements in addition to Yarn and npm. Follow our simple setup guide: diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter1/second-page.md b/docs/versioned_docs/version-6.x/tutorial/chapter1/second-page.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter1/second-page.md rename to docs/versioned_docs/version-6.x/tutorial/chapter1/second-page.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter2/cells.md b/docs/versioned_docs/version-6.x/tutorial/chapter2/cells.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter2/cells.md rename to docs/versioned_docs/version-6.x/tutorial/chapter2/cells.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter2/getting-dynamic.md b/docs/versioned_docs/version-6.x/tutorial/chapter2/getting-dynamic.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter2/getting-dynamic.md rename to docs/versioned_docs/version-6.x/tutorial/chapter2/getting-dynamic.md index 627de868701e..c67b0712b314 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter2/getting-dynamic.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter2/getting-dynamic.md @@ -135,7 +135,7 @@ Okay but what if we click "Delete"? So, Redwood just created all the pages, components and services necessary to perform all CRUD actions on our posts table. No need to even open Prisma Studio or login through a terminal window and write SQL from scratch. Redwood calls these _scaffolds_. -:::caution +:::warning If you head back to VSCode at some point and get a notice in one of the generated Post cells about `Cannot query "posts" on type "Query"` don't worry: we've seen this from time to time on some systems. 
There are two easy fixes: diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter2/routing-params.md b/docs/versioned_docs/version-6.x/tutorial/chapter2/routing-params.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter2/routing-params.md rename to docs/versioned_docs/version-6.x/tutorial/chapter2/routing-params.md index dbeb77632382..ff502bb040ac 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter2/routing-params.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter2/routing-params.md @@ -349,7 +349,7 @@ export const Success = ({ article }) => { <TabItem value="ts" label="TypeScript"> ```tsx title="web/src/components/ArticleCell/ArticleCell.tsx" -import type { FindArticleQuery } from 'types/graphql' +import type { FindArticleQuery, FindArticleQueryVariables } from 'types/graphql' import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' export const QUERY = gql` @@ -775,7 +775,7 @@ export const Success = ({ article }) => { // highlight-next-line import Article from 'src/components/Article' -import type { FindArticleQuery } from 'types/graphql' +import type { FindArticleQuery, FindArticleQueryVariables } from 'types/graphql' import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' export const QUERY = gql` diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter2/side-quest.md b/docs/versioned_docs/version-6.x/tutorial/chapter2/side-quest.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter2/side-quest.md rename to docs/versioned_docs/version-6.x/tutorial/chapter2/side-quest.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter3/forms.md b/docs/versioned_docs/version-6.x/tutorial/chapter3/forms.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter3/forms.md rename to docs/versioned_docs/version-6.x/tutorial/chapter3/forms.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter3/saving-data.md b/docs/versioned_docs/version-6.x/tutorial/chapter3/saving-data.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter3/saving-data.md rename to docs/versioned_docs/version-6.x/tutorial/chapter3/saving-data.md index 67b8c1f8de47..7b4732c5c14a 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter3/saving-data.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter3/saving-data.md @@ -1795,7 +1795,7 @@ const [create, { loading, error }] = useMutation< </TabItem> </Tabs> -:::caution +:::warning You can put the email validation back into the `<TextField>` now, but you should leave the server validation in place, just in case. 
diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter4/authentication.md b/docs/versioned_docs/version-6.x/tutorial/chapter4/authentication.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter4/authentication.md rename to docs/versioned_docs/version-6.x/tutorial/chapter4/authentication.md index 562d7b34b0a3..c42b3fb7d393 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter4/authentication.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter4/authentication.md @@ -95,7 +95,7 @@ Redwood includes [integrations](../../authentication.md) for several of the most - [Auth0](https://auth0.com/) - [Clerk](https://clerk.dev/) - [Netlify Identity](https://docs.netlify.com/visitor-access/identity/) -- [Firebase's GoogleAuthProvider](https://firebase.google.com/docs/reference/js/firebase.auth.GoogleAuthProvider) +- [Firebase's GoogleAuthProvider](https://firebase.google.com/docs/reference/js/v8/firebase.auth.GoogleAuthProvider) - [Supabase](https://supabase.io/docs/guides/auth) - [SuperTokens](https://supertokens.com) @@ -202,7 +202,7 @@ Going to the admin section now prevents a non-logged in user from seeing posts, ```jsx title="web/src/Routes.jsx" // highlight-next-line -import { Private, Router, Route, Set } from '@redwoodjs/router' +import { PrivateSet, Router, Route, Set } from '@redwoodjs/router' import ScaffoldLayout from 'src/layouts/ScaffoldLayout' import BlogLayout from 'src/layouts/BlogLayout' diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter4/deployment.md b/docs/versioned_docs/version-6.x/tutorial/chapter4/deployment.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter4/deployment.md rename to docs/versioned_docs/version-6.x/tutorial/chapter4/deployment.md index fd273091f468..bf7376195eb3 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter4/deployment.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter4/deployment.md @@ -87,7 +87,7 @@ This adds a `netlify.toml` config file in the root of the project that is good t And with that, we're ready to setup Netlify itself. -:::caution +:::warning While you may be tempted to use the [Netlify CLI](https://cli.netlify.com) commands to [build](https://cli.netlify.com/commands/build) and [deploy](https://cli.netlify.com/commands/deploy) your project directly from you local project directory, doing so **will lead to errors when deploying and/or when running functions**. I.e. errors in the function needed for the GraphQL server, but also other serverless functions. The main reason for this is that these Netlify CLI commands simply build and deploy -- they build your project locally and then push the dist folder. That means that when building a RedwoodJS project, the [Prisma client is generated with binaries matching the operating system at build time](https://cli.netlify.com/commands/link) -- and not the [OS compatible](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#binarytargets-options) with running functions on Netlify. Your Prisma client engine may be `darwin` for OSX or `windows` for Windows, but it needs to be `debian-openssl-1.1.x` or `rhel-openssl-1.1.x`. If the client is incompatible, your functions will fail. 
diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter5/first-story.md b/docs/versioned_docs/version-6.x/tutorial/chapter5/first-story.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter5/first-story.md rename to docs/versioned_docs/version-6.x/tutorial/chapter5/first-story.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter5/first-test.md b/docs/versioned_docs/version-6.x/tutorial/chapter5/first-test.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter5/first-test.md rename to docs/versioned_docs/version-6.x/tutorial/chapter5/first-test.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter5/storybook.md b/docs/versioned_docs/version-6.x/tutorial/chapter5/storybook.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter5/storybook.md rename to docs/versioned_docs/version-6.x/tutorial/chapter5/storybook.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter5/testing.md b/docs/versioned_docs/version-6.x/tutorial/chapter5/testing.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter5/testing.md rename to docs/versioned_docs/version-6.x/tutorial/chapter5/testing.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter6/comment-form.md b/docs/versioned_docs/version-6.x/tutorial/chapter6/comment-form.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter6/comment-form.md rename to docs/versioned_docs/version-6.x/tutorial/chapter6/comment-form.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter6/comments-schema.md b/docs/versioned_docs/version-6.x/tutorial/chapter6/comments-schema.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter6/comments-schema.md rename to docs/versioned_docs/version-6.x/tutorial/chapter6/comments-schema.md index 421bfb51b2e5..243488167736 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter6/comments-schema.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter6/comments-schema.md @@ -685,7 +685,7 @@ export const standard = defineScenario({ </TabItem> <TabItem value="ts" label="TypeScript"> -```javascript title="api/src/services/comments/comments.scenarios.ts" +```ts title="api/src/services/comments/comments.scenarios.ts" import type { Prisma } from '@prisma/client' export const standard = defineScenario<Prisma.CommentCreateArgs>({ diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter6/multiple-comments.md b/docs/versioned_docs/version-6.x/tutorial/chapter6/multiple-comments.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter6/multiple-comments.md rename to docs/versioned_docs/version-6.x/tutorial/chapter6/multiple-comments.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter6/the-redwood-way.md b/docs/versioned_docs/version-6.x/tutorial/chapter6/the-redwood-way.md similarity index 100% rename from docs/versioned_docs/version-6.0/tutorial/chapter6/the-redwood-way.md rename to docs/versioned_docs/version-6.x/tutorial/chapter6/the-redwood-way.md diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter7/api-side-currentuser.md b/docs/versioned_docs/version-6.x/tutorial/chapter7/api-side-currentuser.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter7/api-side-currentuser.md rename to docs/versioned_docs/version-6.x/tutorial/chapter7/api-side-currentuser.md index 105cc05349ce..bce5ffaa2efd 100644 --- 
a/docs/versioned_docs/version-6.0/tutorial/chapter7/api-side-currentuser.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter7/api-side-currentuser.md @@ -112,7 +112,7 @@ Whoops! Similar to what happened when we added `roles` to `User`, We made `userId` a required field, but we already have several posts in our development database. Since we don't have a default value for `userId` defined, it's impossible to add this column to the database. -:::caution Why don't we just set `@default(1)` in the schema? +:::warning Why don't we just set `@default(1)` in the schema? This would get us past this problem, but could cause hard-to-track-down bugs in the future: if you ever forget to assign a `post` to a `user`, rather than fail it'll happily just set `userId` to `1`, which may or may not even exist some day! It's best to take the extra time to do things The Right Way and avoid the quick hacks to get past an annoyance like this. Your future self will thank you! @@ -605,7 +605,7 @@ Finally, we'll need to update several of the scaffold components to use the new ```javascript title="web/src/components/Post/EditPostCell/EditPostCell.js" export const QUERY = gql` - query FindPostById($id: Int!) { + query EditPostById($id: Int!) { // highlight-next-line post: adminPost(id: $id) { id diff --git a/docs/versioned_docs/version-6.0/tutorial/chapter7/rbac.md b/docs/versioned_docs/version-6.x/tutorial/chapter7/rbac.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/chapter7/rbac.md rename to docs/versioned_docs/version-6.x/tutorial/chapter7/rbac.md index bac9123c38c4..490b98a4d5ac 100644 --- a/docs/versioned_docs/version-6.0/tutorial/chapter7/rbac.md +++ b/docs/versioned_docs/version-6.x/tutorial/chapter7/rbac.md @@ -178,14 +178,14 @@ The easiest way to prevent access to an entire URL is via the Router. The `<Priv ```tsx title="web/src/Routes.tsx" // highlight-next-line -<Private unauthenticated="home" roles="admin"> +<PrivateSet unauthenticated="home" roles="admin"> <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> <Route path="/admin/posts/new" page={PostNewPostPage} name="newPost" /> <Route path="/admin/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> <Route path="/admin/posts/{id:Int}" page={PostPostPage} name="post" /> <Route path="/admin/posts" page={PostPostsPage} name="posts" /> </Set> -</Private> +</PrivateSet> ``` </TabItem> @@ -228,7 +228,7 @@ Which should return the new content of the user: } ``` -:::caution +:::warning If you re-used the same console session from the previous section, you'll need to quit it and start it again for it to know about the new Prisma data structure. If you still can't get the update to work, maybe your user doesn't have an `id` of `1`! Run `db.user.findMany()` first and then get the `id` of the user you want to update. @@ -895,7 +895,7 @@ describe('Comment', () => { We moved the default `comment` object to a constant `COMMENT` and then used that in all tests. We also needed to add `waitFor()` since the `hasRole()` check in the Comment itself actually executes some GraphQL calls behind the scenes to figure out who the user is. The test suite makes mocked GraphQL calls, but they're still asynchronous and need to be waited for. If you don't wait, then `currentUser` will be `null` when the test starts, and Jest will be happy with that result. But we won't—we need to wait for the actual value from the GraphQL call. -:::caution Seeing errors in your test suite? 
+:::warning Seeing errors in your test suite? We added fields to the database and sometimes the test runner doesn't realize this. You may need to restart it to get the test database migrated to match what's in `schema.prisma`. Press `q` or `Ctrl-C` in your test runner if it's still running, then: diff --git a/docs/versioned_docs/version-6.0/tutorial/foreword.md b/docs/versioned_docs/version-6.x/tutorial/foreword.md similarity index 99% rename from docs/versioned_docs/version-6.0/tutorial/foreword.md rename to docs/versioned_docs/version-6.x/tutorial/foreword.md index f6b44e65c5de..ec1f1810ed40 100644 --- a/docs/versioned_docs/version-6.0/tutorial/foreword.md +++ b/docs/versioned_docs/version-6.x/tutorial/foreword.md @@ -24,7 +24,7 @@ They might look like this... ::: -:::caution +:::warning or sometimes like this... diff --git a/docs/versioned_docs/version-6.0/tutorial/intermission.md b/docs/versioned_docs/version-6.x/tutorial/intermission.md similarity index 97% rename from docs/versioned_docs/version-6.0/tutorial/intermission.md rename to docs/versioned_docs/version-6.x/tutorial/intermission.md index 0416b563de5b..793730157af2 100644 --- a/docs/versioned_docs/version-6.0/tutorial/intermission.md +++ b/docs/versioned_docs/version-6.x/tutorial/intermission.md @@ -34,9 +34,9 @@ yarn rw dev If you haven't been through the first tutorial, or maybe you went through it on an older version of Redwood (anything pre-0.41) you can clone [this repo](https://github.com/redwoodjs/redwood-tutorial) which contains everything built so far and also adds a little styling so it isn't quite so...tough to look at. The example repo includes [TailwindCSS](https://tailwindcss.com) to style things up and adds a `<div>` or two to give us some additional hooks to hang styling on. -:::caution The TypeScript version of the Example Repo is currently in progress +:::warning The TypeScript version of the Example Repo is currently in progress -If you want to complete the tutorial in TypeScript, continue with your own repo, making any necessary edits. Don't worry, the remainder of the tutorial continues to offer both TypeScript and JavaScript example code changes. +If you want to complete the tutorial in TypeScript, continue with your own repo, making any necessary edits. Don't worry, the remainder of the tutorial continues to offer both TypeScript and JavaScript example code changes. ::: diff --git a/docs/versioned_docs/version-6.0/typescript/generated-types.md b/docs/versioned_docs/version-6.x/typescript/generated-types.md similarity index 84% rename from docs/versioned_docs/version-6.0/typescript/generated-types.md rename to docs/versioned_docs/version-6.x/typescript/generated-types.md index 76d33cb1b451..644cc7b39bd4 100644 --- a/docs/versioned_docs/version-6.0/typescript/generated-types.md +++ b/docs/versioned_docs/version-6.x/typescript/generated-types.md @@ -164,9 +164,40 @@ You can configure graphql-codegen in a number of different ways: `codegen.yml`, For completeness, [here's the docs](https://www.graphql-code-generator.com/docs/config-reference/config-field) on configuring GraphQL Code Generator. Currently, Redwood only supports the root level `config` option. +## Experimental SDL Code Generation + +There is also an experimental code generator based on [sdl-codegen](https://github.com/sdl-codegen/sdl-codegen) available. sdl-codegen is a fresh implementation of code generation for service files, built with Redwood in mind. 
It is currently opt-in and can be enabled by setting the `useSDLCodeGenForGraphQLTypes` flag to `true` under `[experimental]` in your `redwood.toml` file: + +```toml title="redwood.toml" +[experimental] + useSDLCodeGenForGraphQLTypes = true +``` + +Running `yarn rw g types` will generate types for your resolvers on a per-file basis. This feature can be paired with the optional eslint auto-fix rule to have types automatically applied to your resolvers in TypeScript service files by editing your root `package.json` with: + +```diff title="package.json" + "eslintConfig": { + "extends": "@redwoodjs/eslint-config", + "root": true, + "parserOptions": { + "warnOnUnsupportedTypeScriptVersion": false + }, ++ "overrides": [ ++ { ++ "files": [ ++ "api/src/services/**/*.ts" ++ ], ++ "rules": { ++ "@redwoodjs/service-type-annotations": "error" ++ } ++ } + ] + }, +``` + +:::tip Using VSCode? -As a part of type generation, the [VSCode GraphQL extension](https://marketplace.visualstudio.com/items?itemName=GraphQL.vscode-graphql) configures itself based on the merged schema Redwood generates in `.redwood/schema.graphql`. +As a part of type generation, the extension [GraphQL: Language Feature Support](https://marketplace.visualstudio.com/items?itemName=GraphQL.vscode-graphql) configures itself based on the merged schema Redwood generates in `.redwood/schema.graphql`. You can configure it further in `graphql.config.js` at the root of your project. ::: diff --git a/docs/versioned_docs/version-6.0/typescript/introduction.md b/docs/versioned_docs/version-6.x/typescript/introduction.md similarity index 100% rename from docs/versioned_docs/version-6.0/typescript/introduction.md rename to docs/versioned_docs/version-6.x/typescript/introduction.md diff --git a/docs/versioned_docs/version-6.0/typescript/strict-mode.md b/docs/versioned_docs/version-6.x/typescript/strict-mode.md similarity index 100% rename from docs/versioned_docs/version-6.0/typescript/strict-mode.md rename to docs/versioned_docs/version-6.x/typescript/strict-mode.md diff --git a/docs/versioned_docs/version-6.0/typescript/utility-types.md b/docs/versioned_docs/version-6.x/typescript/utility-types.md similarity index 100% rename from docs/versioned_docs/version-6.0/typescript/utility-types.md rename to docs/versioned_docs/version-6.x/typescript/utility-types.md diff --git a/docs/versioned_docs/version-6.0/vite-configuration.md b/docs/versioned_docs/version-6.x/vite-configuration.md similarity index 100% rename from docs/versioned_docs/version-6.0/vite-configuration.md rename to docs/versioned_docs/version-6.x/vite-configuration.md diff --git a/docs/versioned_docs/version-6.0/webhooks.md b/docs/versioned_docs/version-6.x/webhooks.md similarity index 98% rename from docs/versioned_docs/version-6.0/webhooks.md rename to docs/versioned_docs/version-6.x/webhooks.md index 706c37abb8f6..a185c7ebc4fa 100644 --- a/docs/versioned_docs/version-6.0/webhooks.md +++ b/docs/versioned_docs/version-6.x/webhooks.md @@ -214,6 +214,14 @@ This is a variation on the SHA256 HMAC verification that works with binary buffe Svix (and by extension, Clerk) gives you a secret token that it uses to create a hash signature with each payload. This hash signature is included with the headers of each request as `svix-signature`. +> Some production environments, like Vercel, might base64 encode the request body string. In that case, the body must be conditionally parsed. +> ```js +> export const handler = async (event: APIGatewayEvent) => { +> const body = event.isBase64Encoded +> ?
Buffer.from(event.body, 'base64').toString('utf-8') +> : event.body +> ``` + ```tsx import type { APIGatewayEvent } from 'aws-lambda' import { diff --git a/docs/versioned_docs/version-7.0/a11y.md b/docs/versioned_docs/version-7.0/a11y.md new file mode 100644 index 000000000000..7cc09b9b0df3 --- /dev/null +++ b/docs/versioned_docs/version-7.0/a11y.md @@ -0,0 +1,170 @@ +--- +slug: accessibility +description: Accessibility is a core feature that's built-in +--- + +# Accessibility (aka a11y) + +We built Redwood to make building websites more accessible (we write all the config so you don't have to), but Redwood's also built to help you make more accessible websites. +Accessibility shouldn't be a nice-to-have. +It should be a given from the start. +A core feature that's built-in and well-supported. + +There's a lot of great tooling out there that'll not only help you build accessible websites, but also help you learn exactly what that means. + +> **Does tooling obviate the need for manual testing?** +> +> No—even with all the tooling in the world, manual testing is still important, especially for accessibility. +> But just because tooling doesn't catch everything doesn't mean it's not valuable. +> It'd be much harder to learn what to look for without it. + +## Accessible Routing + +For single-page applications (SPAs), accessibility starts with the router. +Without a full-page refresh, you just can't be sure that things like announcements and focus are being taken care of the way they're supposed to be. +Here's a great example of [how disorienting SPAs can be to screen-reader users](https://www.youtube.com/watch?v=NKTdNv8JpuM). +On navigation, nothing's announced. +The lack of an announcement isn't just buggy behavior—it's broken. + +Normally, the onus would be on you as a developer to announce to screen-reader users that they've navigated somewhere new. +That's a lot to ask—and hard to get right—especially when you're just trying to build your app. + +Luckily, if you're writing thoughtful content and marking it up semantically, there's nothing you have to do! +The router automatically announces pages on navigation, and looks for announcements in this order: + +1. The `RouteAnnouncement` component +2. The page's `<h1>` +3. `document.title` +4. `location.pathname` + +The reason for this order is that announcements should be as specific as possible. +more specific usually means more descriptive, and more descriptive usually means that users can not only orient themselves and navigate through the content, but also find it again. + +> If you're not sure if your content is descriptive enough, see the [W3 guidelines](https://www.w3.org/WAI/WCAG21/Techniques/general/G88.html). + +Even though Redwood looks for a `RouteAnnouncement` component first, you don't have to have one on every page—it's more than ok for the `<h1>` to be what's announced. +`RouteAnnouncement` is there for when the situation calls for a custom announcement. + +### `RouteAnnouncement` + +The way `RouteAnnouncement` works is simple: its children will be announced. 
+Note that this can be something on the page or can be something that's visually hidden using the `visuallyHidden` prop: + +```jsx title="web/src/pages/HomePage/HomePage.js" +import { RouteAnnouncement } from '@redwoodjs/router' + +const HomePage = () => { + return ( + // This will still be visible + <RouteAnnouncement> + <h1>Welcome to my site!</h1> + </RouteAnnouncement> + ) +} + +export default HomePage +``` + +```jsx title="web/src/pages/AboutPage/AboutPage.js" +import { RouteAnnouncement } from '@redwoodjs/router' + +const AboutPage = () => { + return ( + <h1>Welcome to my site!</h1> + // This won't be visible + // highlight-start + <RouteAnnouncement visuallyHidden> + All about me + </RouteAnnouncement> + // highlight-end + ) +} + +export default AboutPage +``` + +`visuallyHidden` shouldn't be the first thing you reach for—it's good to maintain parity between your site's visual and audible experiences. +But it's there if you need it. + +## Focus + +On page change, Redwood Router resets focus to the top of the DOM so that users can navigate through the new page. +While this is the expected behavior (and the behavior you usually want), for some pages—especially those with a lot of navigation—it can be cumbersome for users to have tab through navigation before getting to the main point. +(And that goes for every page change!) + +Right now, there's two ways to alleviate this: with skip links or the `RouteFocus` component. + +### Skip Links + +Since the main content isn't usually the first thing on the page, it's a best practice to provide a shortcut for keyboard and screen-reader users to skip to it. +Skip links do just that, and if you generate a layout using the `--skipLink` option, you'll get one with a skip link: + +``` +yarn rw g layout main --skipLink +``` + +```jsx title="web/src/layouts/MainLayout/MainLayout.js" +import { SkipNavLink, SkipNavContent } from '@redwoodjs/router' +import '@redwoodjs/router/skip-nav.css' + +const MainLayout = ({ children }) => { + return ( + <> + <SkipNavLink /> + <nav></nav> + <SkipNavContent /> + <main>{children}</main> + </> + ) +} + +export default MainLayout +``` + +`SkipNavLink` renders a link that remains hidden till focused and `SkipNavContent` renders a div as the target for the link. +The code for these components comes from Reach UI. For more details, see [Reach UI's docs](https://reach.tech/skip-nav/#reach-skip-nav). + +One thing you'll probably want to do is change the URL the skip link sends the user to when activated. +You can do that by changing the `contentId` and `id` props of `SkipNavLink` and `SkipNavContent` respectively: + +```jsx +<SkipNavLink contentId="main-content" /> +{/* ... */} +<SkipNavContent id="main-content" /> +``` + +If you'd prefer to implement your own skip link, [Ben Myers' blog](https://benmyers.dev/blog/skip-links/) is a great resource, and a great place to read about accessibility in general. + +### `RouteFocus` + +Sometimes you don't want to just skip the nav, but send a user somewhere. +In this situation, you of course have the foresight that that place is where the user wants to be. +So please use this at your discretion—sending a user to an unexpected location can be worse than sending them back the top. 
+ +Having said that, if you know that on a particular page change a user's focus is better off being directed to a particular element, the `RouteFocus` component is what you want: + +```jsx title="web/src/pages/ContactPage/ContactPage.js" +// highlight-next-line +import { RouteFocus } from '@redwoodjs/router' + +const ContactPage = () => ( + <nav> + {/* Way too much nav... */} + </nav> + + // The contact form the user actually wants to interact with + // highlight-start + <RouteFocus> + <TextField name="name" /> + </RouteFocus> + // highlight-end +) + +export default ContactPage +``` + +`RouteFocus` tells the router to send focus to it's child on page change. In the example above, when the user navigates to the contact page, the name text field on the form is focused—the first field of the form they're here to fill out. + +<div class="video-container"> + <iframe src="https://www.youtube.com/embed/T1zs77LU68w?t=3240" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture; modestbranding; showinfo=0; fullscreen"></iframe> +</div> diff --git a/docs/versioned_docs/version-7.0/app-configuration-redwood-toml.md b/docs/versioned_docs/version-7.0/app-configuration-redwood-toml.md new file mode 100644 index 000000000000..4439e88ea3db --- /dev/null +++ b/docs/versioned_docs/version-7.0/app-configuration-redwood-toml.md @@ -0,0 +1,194 @@ +--- +title: App Configuration +description: Configure your app with redwood.toml +--- + +# App Configuration: redwood.toml + +One of the premier places you can configure your Redwood app is `redwood.toml`. By default, `redwood.toml` lists the following configuration options: + +```toml title="redwood.toml" +[web] + title = "Redwood App" + port = 8910 + apiUrl = "/.redwood/functions" + includeEnvironmentVariables = [] +[api] + port = 8911 +[browser] + open = true +[notifications] + versionUpdates = ["latest"] +``` + +These are listed by default because they're the ones that you're most likely to configure, but there are plenty more available. + +You can think of `redwood.toml` as a frontend for configuring Redwood's build tools. +For certain options, instead of having to configure build tools directly, there's quick access via `redwood.toml`. + +## [web] + +| Key | Description | Default | +| :---------------------------- | :--------------------------------------------------------- | :---------------------- | +| `title` | Title of your Redwood app | `'Redwood App'` | +| `port` | Port for the web server to listen at | `8910` | +| `apiUrl` | URL to your api server. This can be a relative URL in which case it acts like a proxy, or a fully-qualified URL | `'/.redwood/functions'` | +| `includeEnvironmentVariables` | Environment variables made available to the web side during dev and build | `[]` | +| `host` | Hostname for the web server to listen at | Defaults to `'0.0.0.0'` in production and `'::'` in development | +| `apiGraphQLUrl` | URL to your GraphQL function | `'${apiUrl}/graphql'` | +| `apiDbAuthUrl` | URL to your dbAuth function | `'${apiUrl}/auth'` | +| `sourceMap` | Enable source maps for production builds | `false` | +| `a11y` | Enable storybook `addon-a11y` and `eslint-plugin-jsx-a11y` | `true` | + +### Customizing the GraphQL Endpoint + +By default, Redwood derives the GraphQL endpoint from `apiUrl` such that it's `${apiUrl}/graphql`, (with the default `apiUrl`, `./redwood/functions/graphql`). +But sometimes you want to host your api side somewhere else. +There's two ways you can do this: + +1. 
Change `apiUrl`: + +```toml title="redwood.toml" +[web] + apiUrl = "https://api.coolredwoodapp.com" +``` + +Now the GraphQL endpoint is at `https://api.coolredwoodapp.com/graphql`. + +2. Change `apiGraphQLUrl`: + +```diff title="redwood.toml" + [web] + apiUrl = "/.redwood/functions" ++ apiGraphQLUrl = "https://api.coolredwoodapp.com/graphql" +``` + +### Customizing the dbAuth Endpoint + +Similarly, if you're using dbAuth, you may decide to host it somewhere else. +To do this without affecting your other endpoints, you can add `apiDbAuthUrl` to your `redwood.toml`: + +```diff title="redwood.toml" + [web] + apiUrl = "/.redwood/functions" ++ apiDbAuthUrl = "https://api.coolredwoodapp.com/auth" +``` + +:::tip + +If you host your web and api sides at different domains and don't use a proxy, make sure you have [CORS](./cors.md) configured. +Otherwise browser security features may block client requests. + +::: + +### includeEnvironmentVariables + +`includeEnvironmentVariables` is the set of environment variables that should be available to your web side during dev and build. +Use it to include env vars like public keys for third-party services you've defined in your `.env` file: + +```toml title="redwood.toml" +[web] + includeEnvironmentVariables = ["PUBLIC_KEY"] +``` + +```text title=".env" +PUBLIC_KEY=... +``` + +Instead of including them in `includeEnvironmentVariables`, you can also prefix them with `REDWOOD_ENV_` (see [Environment Variables](environment-variables.md#web)). + +:::caution `includeEnvironmentVariables` isn't for secrets + +Don't make secrets available to your web side. Everything in `includeEnvironmentVariables` is included in the bundle. + +::: + +## [api] + +| Key | Description | Default | +| :------------- | :---------------------------------- | :------------------------- | +| `port` | Port for the api server to listen at | `8911` | +| `host` | Hostname for the api server to listen at | Defaults to `'0.0.0.0'` in production and `'::'` in development | +| `debugPort` | Port for the debugger to listen at | `18911` | +| `serverConfig` | [Deprecated; use the [server file](./docker.md#using-the-server-file) instead] Path to the `server.config.js` file | `'./api/server.config.js'` | + +## [browser] + +```toml title="redwood.toml" +[browser] + open = true +``` + +Setting `open` to `true` opens your browser to `http://${web.host}:${web.port}` (by default, `http://localhost:8910`) after the dev server starts. +If you want your browser to stop opening when you run `yarn rw dev`, set this to `false`. +(Or just remove it entirely.) + +There's actually a lot more you can do here. For more, see Vite's docs on [`preview.open`](https://vitejs.dev/config/preview-options.html#preview-open). + +## [generate] + +```toml title="redwood.toml" +[generate] + tests = true + stories = true +``` + +Many of Redwood's generators create Jest tests or Storybook stories. +Understandably, this can be a lot of files, and sometimes you don't want all of them, either because you don't plan on using Jest or Storybook, or are just getting started and don't want the overhead. +These options allow you to disable the generation of test and story files. + +## [notifications] + +```toml title="redwood.toml" +[notifications] + versionUpdates = ["latest"] +``` + +There are new versions of the framework all the time—a major every couple months, a minor every week or two, and patches when appropriate. +And if you're on an experimental release line, like canary, there's new versions every day, multiple times.
+ +If you'd like to get notified (at most, once a day) when there's a new version, set `versionUpdates` to include the version tags you're interested in. + +## Using Environment Variables in `redwood.toml` + +You may find yourself wanting to change keys in `redwood.toml` based on the environment you're deploying to. +For example, you may want to point to a different `apiUrl` in your staging environment. + +You can do so with environment variables. +Let's look at an example: + +```toml title="redwood.toml" +[web] + // highlight-start + title = "App running on ${APP_TITLE}" + port = "${PORT:8910}" + apiUrl = "${API_URL:/.redwood/functions}" + // highlight-end + includeEnvironmentVariables = [] +``` + +This `${<envVar>:[fallback]}` syntax does the following: + +- sets `title` by interpolating the env var `APP_TITLE` +- sets `port` to the env var `PORT`, falling back to `8910` +- sets `apiUrl` to the env var `API_URL`, falling back to `/.redwood/functions` (the default) + +That's pretty much all there is to it. +Just remember two things: + +1. fallback is always a string +2. these values are interpolated at build time + +## Running in a Container or VM + +To run a Redwood app in a container or VM, you'll want to set both the web and api's `host` to `0.0.0.0` to allow network connections to and from the host: + +```toml title="redwood.toml" +[web] + host = '0.0.0.0' +[api] + host = '0.0.0.0' +``` + +You can also configure these values via `REDWOOD_WEB_HOST` and `REDWOOD_API_HOST`. +And if you set `NODE_ENV` to production, these will be the defaults anyway. diff --git a/docs/versioned_docs/version-7.0/assets-and-files.md b/docs/versioned_docs/version-7.0/assets-and-files.md new file mode 100644 index 000000000000..785f92861ef5 --- /dev/null +++ b/docs/versioned_docs/version-7.0/assets-and-files.md @@ -0,0 +1,180 @@ +--- +description: How to include assets—like images—in your app +--- + +# Assets and Files + +There are two ways to add an asset to your Redwood app: + +1. co-locate it with the component using it and import it into the component as if it were code +2. add it to the `web/public` directory and reference it relative to your site's root + +Where possible, prefer the first strategy. + +It lets Vite include the asset in the bundle when the file is small enough. + +### Co-locating and Importing Assets + +Let's say you want to show your app's logo in your `Header` component. +First, add your logo to the `Header` component's directory: + +```text +web/src/components/Header/ +// highlight-next-line +├── logo.png +├── Header.js +├── Header.stories.js +└── Header.test.js +``` + +Then, in the `Header` component, import your logo as if it were code: + +```jsx title="web/src/components/Header/Header.js" +// highlight-next-line +import logo from './logo.png' + +const Header = () => { + return ( + <header> + {/* ... */} + // highlight-next-line + <img src={logo} alt="Logo" /> + </header> + ) +} + +export default Header +``` + +If you're curious how this works, see the Vite docs on [static asset handling](https://vitejs.dev/guide/assets.html). + +## Adding to the `web/public` Directory + +You can also add assets to the `web/public` directory, effectively adding static files to your app. +During dev and build, Redwood copies `web/public`'s contents into `web/dist`. + +> Changes to `web/public` don't hot-reload. 
+ +Again, because assets in this directory don't go through Vite, **use this strategy sparingly**, and mainly for assets like favicons, manifests, `robots.txt`, libraries incompatible with Vite, etc. + +### Example: Adding Your Logo and Favicon to `web/public` + +Let's say that you've added your logo and favicon to `web/public`: + +``` +web/public/ +├── img/ +│ └── logo.png +└── favicon.png +``` + +When you run `yarn rw dev` and `yarn rw build`, Redwood copies +`web/public/img/logo.png` to `web/dist/img/logo.png` and `web/public/favicon.png` to `web/dist/favicon.png`: + +```text +web/dist/ +├── static/ +│ ├── js/ +│ └── css/ +// highlight-start +├── img/ +│ └── logo.png +└── favicon.png +// highlight-end +``` + +You can reference these files in your code without any special handling: + +```jsx title="web/src/components/Header/Header.js" +import { Head } from '@redwoodjs/web' + +const Header = () => { + return ( + <> + <Head> + // highlight-next-line + <link rel="icon" type="image/png" href="favicon.png" /> + </Head> + // highlight-next-line + <img src="img/logo.png" alt="Logo" /> + </> + ) +} + +export default Header +``` + +## Styling SVGs: The special type of image + +By default you can import and use SVG images like any other image asset. + +```jsx title="web/src/components/Example.jsx" +// highlight-next-line +import svgIconSrc from '../mySvg.svg' + +const Example = () => { + return ( + <> + // highlight-next-line + <img src={svgIconSrc} alt="Logo" /> + </> + ) +} + +export default Example +``` + +Sometimes however, you might want more control over styling your SVGs - maybe you want to modify the `stroke-width` or `fill` color. + +The easiest way to achieve this, is to make your SVGs a React component. Open up your SVG file, and drop in its contents into a component – for example: + +```tsx title="web/src/components/icons/CarIcon.tsx" +import type { SVGProps } from "react" + +export const CarIcon = (props: SVGProps) => { + return ( + // 👇 content of your SVG file + <svg + // highlight-next-line + className="fill-blue-500" // 👈 you can use classes, like with tailwind + // highlight-next-line + stroke={props.strokeColor} // or adjust properties directly + // ... +``` + +If you needed to convert a whole library of SVGs into stylable (or animatable!) components, one easy way would be to use the [SVGR cli](https://react-svgr.com/docs/cli/) + + +## Custom fonts +There are many different ways to peel this potato – it's all a search away – but if you're using the CSS `@font-face` rule, we have a quick tip for you: + +1. Place your fonts in the public folder, so it gets carried across +2. In your CSS, use absolute paths - the public folder being your root - to point to the font file (same as the [Vite docs](https://vitejs.dev/guide/assets.html#the-public-directory)), for example: + +```shell +web/ +├── src + ├── App.tsx + ├── entry.client.tsx + ├── index.css + ├── ... +├── public +│ ├── favicon.png +│ ├── fonts +// highlight-next-line +│ │ └── RedwoodNeue.woff2 +``` + +```css +/* in e.g. 
index.css */ +@font-face { + font-family: 'Redwood Neue'; + /* 👇 it's a relative path */ + // highlight-next-line + src: url('/fonts/RedwoodNeue.woff2') + format('woff2'); + font-weight: 300; + font-style: italic; + ascent-override: 97%; +} +``` diff --git a/docs/versioned_docs/version-7.0/auth/auth0.md b/docs/versioned_docs/version-7.0/auth/auth0.md new file mode 100644 index 000000000000..a028f9418152 --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/auth0.md @@ -0,0 +1,89 @@ +--- +sidebar_label: Auth0 +--- + +# Auth0 Authentication + +To get started, run the setup command: + +```bash +yarn rw setup auth auth0 +``` + +This installs all the packages, writes all the files, and makes all the code modifications you need. +For a detailed explanation of all the api- and web-side changes that aren't exclusive to Auth0, see the top-level [Authentication](../authentication.md) doc. +For now, let's focus on Auth0's side of things. + +If you don't have an Auth0 account yet, now's the time to make one: navigate to https://auth0.com and sign up, then create an application. +When it asks you to choose an application type, choose SPA (single-page application). +Don't bother with the quick start—just click the "Settings" tab. +We'll get some of our application's API keys here. + +You should see two of the four API keys we need right away: "Domain" and "Client ID". +Copy those over to your project's `.env` file as `AUTH0_DOMAIN` and `AUTH0_CLIENT_ID` respectively. + +There's one more on this page; scroll down to "Application URIs" and look for "Allowed Callback URLs". +With Auth0, when you log in or sign up, it'll redirect you to Auth0's hosted log-in or sign-up page, then back to your Redwood app. +But where in your Redwood app exactly? +Auth0 needs to know, and this setting tells it. + +We'll keep things simple for now and make it "http://localhost:8910", but feel free to configure it as you wish. +Paste "http://localhost:8910" in the text areas below "Allowed Callback URLs", "Allowed Logout URLs" and "Allowed Web Origins", then click "Save Changes" at the bottom of the page. +Copy this one over to your project's `.env` file too, as `AUTH0_REDIRECT_URI`. + +Ok, just one more to go: under "Applications" in the nav on the left, click "APIs". +There should be one there already. +We don't need to click into it; next to its name ("Auth0 Management API" maybe) Auth0 thoughtfully shows what we need, the "API Audience". +Copy it into your project's `.env` file as `AUTH0_AUDIENCE`. +All together now: + +```bash title=".env" +AUTH0_DOMAIN="..." +AUTH0_CLIENT_ID="..." +AUTH0_REDIRECT_URI="http://localhost:8910" +AUTH0_AUDIENCE="..." +``` + +Lastly, include all these env vars in the list of env vars that should be available to the web side in `redwood.toml`: + +```toml title="redwood.toml" +[web] + # ... + includeEnvironmentVariables = [ + "AUTH0_DOMAIN", + "AUTH0_CLIENT_ID", + "AUTH0_REDIRECT_URI", + "AUTH0_AUDIENCE", + ] +``` + +That should be enough; now, things should just work. +Let's make sure: if this is a brand new project, generate a home page. +There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: + +``` +yarn rw g page home / +``` + +```tsx title="web/src/pages/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc.
*/} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={signUp}>sign up</button> + </> + ) +} +``` + +Clicking sign up should redirect you to Auth0: + +<img width="1522" alt="image" src="https://user-images.githubusercontent.com/32992335/209001246-244db949-31f8-42ff-804e-18f3e423ce89.png" /> + +After you sign up, you should be redirected back to your Redwood app, and you should see `{"isAuthenticated":true}` on the page. diff --git a/docs/versioned_docs/version-7.0/auth/azure.md b/docs/versioned_docs/version-7.0/auth/azure.md new file mode 100644 index 000000000000..ce872f914f25 --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/azure.md @@ -0,0 +1,182 @@ +--- +sidebar_label: Azure +--- + +# Azure Active Directory Authentication + +To get started, run the setup command: + +```bash +yarn rw setup auth azure-active-directory +``` + +This installs all the packages, writes all the files, and makes all the code +modifications you need. For a detailed explanation of all the api- and web-side +changes that aren't exclusive to Azure, see the top-level +[Authentication](../authentication.md) doc. For now, let's focus on Azure's +side of things. + +Follow the steps in [Single-page application: App registration](https://docs.microsoft.com/en-us/azure/active-directory/develop/scenario-spa-app-registration). +After registering your app, you'll be redirected to its "Overview" section. +We're interested in two credentials here, "Application (client) ID" and "Directory (tenant) ID". +Go ahead and copy "Application (client) ID" to your `.env` file as `AZURE_ACTIVE_DIRECTORY_CLIENT_ID`. +But "Directory (tenant) ID" needs a bit more explanation. + +Azure has an option called "Authority". It's a URL that specifies a directory that MSAL (Microsoft Authentication Library) can request tokens from. +You can read more about it [here](https://docs.microsoft.com/en-us/azure/active-directory/develop/msal-client-application-configuration#authority), +but to cut to the chase, you probably want `https://login.microsoftonline.com/${tenantId}` as your Authority, where `tenantId` is "Directory (tenant) ID". + +After substituting your app's "Directory (tenant) ID" in the URL, add it to your `.env` file as `AZURE_ACTIVE_DIRECTORY_AUTHORITY`. +All together now: + +```bash title=".env" +AZURE_ACTIVE_DIRECTORY_CLIENT_ID="..." +# Where `tenantId` is your app's "Directory (tenant) ID" +AZURE_ACTIVE_DIRECTORY_AUTHORITY="https://login.microsoftonline.com/${tenantId}" +``` + +Ok, back to [Single-page application: App registration](https://docs.microsoft.com/en-us/azure/active-directory/develop/scenario-spa-app-registration). +At the end, it says... + +> Next, configure the app registration with a Redirect URI to specify where the Microsoft identity platform should redirect the client along with any security tokens. +> Use the steps appropriate for the version of MSAL.js you're using in your application: +> +> - MSAL.js 2.0 with auth code flow (recommended) +> - MSAL.js 1.0 with implicit flow + +Redwood uses [MSAL.js 2.0 with auth code flow](https://learn.microsoft.com/en-us/azure/active-directory/develop/scenario-spa-app-registration#redirect-uri-msaljs-20-with-auth-code-flow), so follow the steps there next. +When it asks you for a Redirect URI, enter `http://localhost:8910` and `http://localhost:8910/login`, and copy these into your `.env` file as `AZURE_ACTIVE_DIRECTORY_REDIRECT_URI` and `AZURE_ACTIVE_DIRECTORY_LOGOUT_REDIRECT_URI`: + +:::tip Can't add multiple URI's? 
+ +Configure one, then you'll be able to configure another. + +::: + +```bash title=".env" +AZURE_ACTIVE_DIRECTORY_CLIENT_ID="..." +# Where `tenantId` is your app's "Directory (tenant) ID" +AZURE_ACTIVE_DIRECTORY_AUTHORITY="https://login.microsoftonline.com/${tenantId}" +AZURE_ACTIVE_DIRECTORY_REDIRECT_URI="http://localhost:8910" +AZURE_ACTIVE_DIRECTORY_LOGOUT_REDIRECT_URI="http://localhost:8910/login" +``` + +That's it for .env vars. Don't forget to include them in the `includeEnvironmentVariables` array in `redwood.toml`: + +```toml title="redwood.toml" +[web] + # ... + includeEnvironmentVariables = [ + "AZURE_ACTIVE_DIRECTORY_CLIENT_ID", + "AZURE_ACTIVE_DIRECTORY_AUTHORITY", + "AZURE_ACTIVE_DIRECTORY_REDIRECT_URI", + "AZURE_ACTIVE_DIRECTORY_LOGOUT_REDIRECT_URI", + ] +``` + +Now let's make sure everything works: if this is a brand new project, generate +a home page. There we'll try to sign up by destructuring `signUp` from the +`useAuth` hook (import that from `'src/auth'`). We'll also destructure and +display `isAuthenticated` to see if it worked: + +``` +yarn rw g page home / +``` + +```tsx title="web/src/pages/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc. */} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={signUp}> + Sign Up + </button> + </> + ) +} +``` + +## Roles + +To add roles exposed via the `roles` claim, follow [Add app roles to your application and receive them in the token](https://docs.microsoft.com/en-us/azure/active-directory/develop/howto-add-app-roles-in-azure-ad-apps). + +## `logIn` Options + +`options` in `logIn(options?)` is of type [RedirectRequest](https://azuread.github.io/microsoft-authentication-library-for-js/ref/modules/_azure_msal_browser.html#redirectrequest) and is a good place to pass in optional [scopes](https://docs.microsoft.com/en-us/graph/permissions-reference#user-permissions) to be authorized. +By default, MSAL sets `scopes` to [/.default](https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-permissions-and-consent#the-default-scope) which is built in for every application that refers to the static list of permissions configured on the application registration. Furthermore, MSAL will add `openid` and `profile` to all requests. In the example below we explicit include `User.Read.All` in the login scope. + +```jsx +await logIn({ + scopes: ['User.Read.All'], // becomes ['openid', 'profile', 'User.Read.All'] +}) +``` + +See [loginRedirect](https://azuread.github.io/microsoft-authentication-library-for-js/ref/classes/_azure_msal_browser.publicclientapplication.html#loginredirect), [PublicClientApplication](https://azuread.github.io/microsoft-authentication-library-for-js/ref/classes/_azure_msal_browser.publicclientapplication.html) class and [Scopes Behavior](https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-core/docs/scopes.md#scopes-behavior) for more documentation. + +## `getToken` Options + +`options` in `getToken(options?)` is of type [RedirectRequest](https://azuread.github.io/microsoft-authentication-library-for-js/ref/modules/_azure_msal_browser.html#redirectrequest). +By default, `getToken` will be called with scope `['openid', 'profile']`. 
+Since Azure Active Directory applies [incremental consent](https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-browser/docs/resources-and-scopes.md#dynamic-scopes-and-incremental-consent), we can extend the permissions from the login example by including another scope, for example `Mail.Read`: + +```js +await getToken({ + scopes: ['Mail.Read'], // becomes ['openid', 'profile', 'User.Read.All', 'Mail.Read'] +}) +``` + +See [acquireTokenSilent](https://azuread.github.io/microsoft-authentication-library-for-js/ref/classes/_azure_msal_browser.publicclientapplication.html#acquiretokensilent), [Resources and Scopes](https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-browser/docs/resources-and-scopes.md#resources-and-scopes) or [full class documentation](https://pub.dev/documentation/msal_js/latest/msal_js/PublicClientApplication-class.html#constructors) for more. + +## Azure Active Directory B2C-specific configuration + +You can design your own auth flow with Azure Active Directory B2C using [hosted user flows](https://docs.microsoft.com/en-us/azure/active-directory-b2c/add-sign-up-and-sign-in-policy?pivots=b2c-user-flow). +Using it requires two extra settings. + +#### Update the .env file: + +```bash title=".env" +AZURE_ACTIVE_DIRECTORY_AUTHORITY=https://{your-microsoft-tenant-name}.b2clogin.com/{{your-microsoft-tenant-name}}.onmicrosoft.com/{{your-microsoft-user-flow-id}} +AZURE_ACTIVE_DIRECTORY_JWT_ISSUER=https://{{your-microsoft-tenant-name}}.b2clogin.com/{{your-microsoft-tenant-id}}/v2.0/ +AZURE_ACTIVE_DIRECTORY_KNOWN_AUTHORITY=https://{{your-microsoft-tenant-name}}.b2clogin.com +``` + +Here's an example: + +```bash title=".env.example" +AZURE_ACTIVE_DIRECTORY_AUTHORITY=https://rwauthtestb2c.b2clogin.com/rwauthtestb2c.onmicrosoft.com/B2C_1_signupsignin1 +AZURE_ACTIVE_DIRECTORY_JWT_ISSUER=https://rwauthtestb2c.b2clogin.com/775527ef-8a37-4307-8b3d-cc311f58d922/v2.0/ +AZURE_ACTIVE_DIRECTORY_KNOWN_AUTHORITY=https://rwauthtestb2c.b2clogin.com +``` + +And don't forget to add `AZURE_ACTIVE_DIRECTORY_KNOWN_AUTHORITY` to the `includeEnvironmentVariables` array in `redwood.toml`. +(`AZURE_ACTIVE_DIRECTORY_JWT_ISSUER` is only used on the API side. But more importantly, it's sensitive—do *not* include it in the web side.) + +#### Update `activeDirectoryClient` instance + +This lets the MSAL web-side client know about our new B2C authority: + +```jsx title="web/src/auth.{js,ts}" +const azureActiveDirectoryClient = new PublicClientApplication({ + auth: { + clientId: process.env.AZURE_ACTIVE_DIRECTORY_CLIENT_ID, + authority: process.env.AZURE_ACTIVE_DIRECTORY_AUTHORITY, + redirectUri: process.env.AZURE_ACTIVE_DIRECTORY_REDIRECT_URI, + postLogoutRedirectUri: + process.env.AZURE_ACTIVE_DIRECTORY_LOGOUT_REDIRECT_URI, + // highlight-next-line + knownAuthorities: [process.env.AZURE_ACTIVE_DIRECTORY_KNOWN_AUTHORITY] + }, +}) +``` + +Now you can call the `logIn` and `logOut` functions from `useAuth()`, and everything should just work. 
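+If you want a quick way to try that out, a bare-bones component like the one below works. The component itself is just an example (name and markup are placeholders); the only part that matters is the pair of `useAuth` calls:
+
+```tsx title="web/src/components/AuthButtons/AuthButtons.tsx"
+import { useAuth } from 'src/auth'
+
+const AuthButtons = () => {
+  // Both functions come from the client configured in web/src/auth.{js,ts}
+  const { isAuthenticated, logIn, logOut } = useAuth()
+
+  return isAuthenticated ? (
+    <button onClick={() => logOut()}>Log Out</button>
+  ) : (
+    <button onClick={() => logIn()}>Log In</button>
+  )
+}
+
+export default AuthButtons
+```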
+ +Here's a few more links to relevant documentation for reference: +- [Overview of tokens in Azure Active Directory B2C](https://docs.microsoft.com/en-us/azure/active-directory-b2c/tokens-overview) +- [Working with MSAL.js and Azure AD B2C](https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-browser/docs/working-with-b2c.md) diff --git a/docs/versioned_docs/version-7.0/auth/clerk.md b/docs/versioned_docs/version-7.0/auth/clerk.md new file mode 100644 index 000000000000..06266d0b4774 --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/clerk.md @@ -0,0 +1,123 @@ +--- +sidebar_label: Clerk +--- + +# Clerk Authentication + +:::warning Did you set up Clerk a while ago? + +If you set up Clerk a while ago, you may be using a deprecated `authDecoder` that's subject to rate limiting. +This decoder will be removed in the next major. +There's a new decoder you can use right now! +See the [migration guide](https://github.com/redwoodjs/redwood/releases/tag/v5.3.2) for how to upgrade. + +::: + + +To get started, run the setup command: + +```text +yarn rw setup auth clerk +``` + +This installs all the packages, writes all the files, and makes all the code modifications you need. +For a detailed explanation of all the api- and web-side changes that aren't exclusive to Clerk, see the top-level [Authentication](../authentication.md) doc. +But for now, let's focus on Clerk's side of things. + +If you don't have a Clerk account yet, now's the time to make one: navigate to https://clerk.dev, sign up, and create an application. +The defaults are good enough to get us going, but feel free to configure things as you wish. +We'll get the application's API keys from its dashboard next. + +:::note we'll only focus on the development instance + +By default, Clerk applications have two instances, "Development" and "Production". +We'll only focus on the "Development" instance here, which is used for local development. +When you're ready to deploy, switch the instance the dashboard is displaying by clicking "Development" in the header at the top. +How you get your API keys to production depends on your deploy provider. + +::: + +After you create the application, you should be redirected to its dashboard where you should see the RedwoodJS logo. +Click on it and copy the two API keys it shows into your project's `.env` file: + +```bash title=".env" +CLERK_PUBLISHABLE_KEY="..." +CLERK_SECRET_KEY="..." +``` + +Lastly, in your project's `redwood.toml` file, include `CLERK_PUBLISHABLE_KEY` in the list of env vars that should be available to the web side: + +```toml title="redwood.toml" +[web] + # ... + includeEnvironmentVariables = [ + "CLERK_PUBLISHABLE_KEY", + ] +``` + +That should be enough; now, things should just work. +Let's make sure: if this is a brand new project, generate a home page: + +```bash +yarn rw g page Home / +``` + +There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: + +```tsx title="web/src/pages/HomePage/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc. */} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={signUp}>sign up</button> + </> + ) +} +``` + +Clicking sign up should open a sign-up box and after you sign up, you should see `{"isAuthenticated":true}` on the page. 
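+Once you're signed in, the same hook also gives you `currentUser` and `logOut`, so a slightly fuller version of the page might look like this. Exactly what ends up on `currentUser` depends on your `getCurrentUser` and the session token (see the next section), so treat the rendered output as a placeholder:
+
+```tsx title="web/src/pages/HomePage/HomePage.tsx"
+import { useAuth } from 'src/auth'
+
+const HomePage = () => {
+  const { isAuthenticated, currentUser, logOut, signUp } = useAuth()
+
+  return (
+    <>
+      <p>{JSON.stringify({ isAuthenticated })}</p>
+      {isAuthenticated ? (
+        <>
+          {/* currentUser is whatever your api side's getCurrentUser returns */}
+          <p>Signed in as: {JSON.stringify(currentUser)}</p>
+          <button onClick={() => logOut()}>sign out</button>
+        </>
+      ) : (
+        <button onClick={signUp}>sign up</button>
+      )}
+    </>
+  )
+}
+
+export default HomePage
+```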
+ + +## Customizing the session token + +There's not a lot to the default session token. +Besides the standard claims, the only thing it really has is the user's `id`. +Eventually, you'll want to customize it so that you can get back more information from Clerk. +You can do so by navigating to the "Sessions" section in the nav on the left, then clicking on "Edit" in the "Customize session token" box: + +![clerk_customize_session_token](https://github.com/redwoodjs/redwood/assets/32992335/6d30c616-b4d2-4b44-971b-8addf3b79e5a) + +As long as you're using the `clerkJwtDecoder`, +all the properties you add will be available to the `getCurrentUser` function: + +```ts title="api/src/lib/auth.ts" +export const getCurrentUser = async ( + decoded, // 👈 All the claims you add will be available on the `decoded` object + // ... +) => { + decoded.myClaim... + + // ... +} +``` + +## Avoiding feature duplication + +Redwood's Clerk integration is based on [Clerk's React SDK](https://clerk.dev/docs/reference/clerk-react/installation). +This means that there's some duplication between the features in the SDK and the ones in `@redwoodjs/auth-clerk-web`. +For example, the SDK has a `SignedOut` component that redirects a user away from a private page—very much like wrapping a route with Redwood's `Private` component. +We recommend you use Redwood's way of doing things as much as possible since it's much more likely to get along with the rest of the framework. + +## Deep dive: the `ClerkStatusUpdater` component + +With Clerk, there's a bit more going on in the `web/src/auth.tsx` file than with other auth providers. +This is because Clerk is a bit unlike the other auth providers Redwood integrates with in that it puts an instance of its client SDK on the browser's `window` object. +That means Redwood has to wait for it to be ready. +With other providers, Redwood instantiates their client SDK in `web/src/auth.ts{x}`, then passes it to `createAuth`. +With Clerk, Redwood instead uses Clerk components and hooks, like `ClerkLoaded` and `useUser`, to update Redwood's auth context with the client when it's ready. diff --git a/docs/versioned_docs/version-7.0/auth/custom.md b/docs/versioned_docs/version-7.0/auth/custom.md new file mode 100644 index 000000000000..65b5b801ab11 --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/custom.md @@ -0,0 +1,301 @@ +--- +sidebar_label: Custom +--- + +# Custom Authentication + +If Redwood doesn't officially integrate with the auth provider you want to use, you're not out of luck just yet: Redwood has an API you can use to integrate your auth provider of choice. + +:::tip Were you using Nhost, magic.link, GoTrue, Okta or Wallet Connect (ethereum)? + +If you're here because you're using one of the providers Redwood used to support (Nhost, magic.link, GoTrue, Okta or Wallet Connect (Ethereum)), we've moved the code for them out into their own separate repos: + +- [Nhost](https://github.com/redwoodjs/auth-nhost) +- [magic.link](https://github.com/redwoodjs/auth-magiclink) +- [GoTrue](https://github.com/redwoodjs/auth-gotrue) +- [Okta](https://github.com/redwoodjs/auth-okta) +- [WalletConnect (Ethereum)](https://github.com/redwoodjs/auth-walletconnect) +- +The code has been updated to work with the auth APIs introduced in v4, but it's mostly untested, so no guarantee it'll work. +But together with this doc, we hope getting one of the auth providers working won't be too difficult.
+ +::: + +When it comes to writing a custom auth integration, there's a little more work to do than just using one of the ready-made packages. But we'll walk you through all that work here, using [Nhost](https://nhost.io/) as an example. Hopefully you have auth up and running before too long! + +To get started, run the setup command: + +```bash +yarn rw setup auth custom +``` + +This makes all the code modifications it can, but whereas with other auth providers, all you have to do now is get your keys, here you have to write some code. + +Let's work on the web side first. +Here most of our time will be spent in the `web/src/auth.ts` file. +It comes commented to guide us, but we'll get into it here. +If you're using TypeScript, scroll past the boilerplate interfaces for now to get to our first task, instantiating the client: + +```ts title="web/src/auth.ts" +import { createAuthentication } from '@redwoodjs/auth' + +// ... + +// Replace this with the auth service provider client sdk +const client = { + login: () => ({ + id: 'unique-user-id', + email: 'email@example.com', + roles: [], + }), + signup: () => ({ + id: 'unique-user-id', + email: 'email@example.com', + roles: [], + }), + logout: () => {}, + getToken: () => 'super-secret-short-lived-token', + getUserMetadata: () => ({ + id: 'unique-user-id', + email: 'email@example.com', + roles: [], + }), +} +``` + +As the comment says, we need to replace this placeholder client object with an instance of our auth provider's client SDK. +Since we're using Nhost, it's time to navigate to [their docs](https://docs.nhost.io/reference/javascript) for a bit of reading. +We'll take all the work you have to do reading docs for granted here and cut to the chase—setting up Nhost's client looks like this: + +```ts +import { NhostClient } from '@nhost/nhost-js' + +const client = new NhostClient({ + backendUrl: '...' +}) +``` + +This means we have to install `@nhost/nhost-js` on the web side, so let's go ahead and do that: + +``` +yarn workspace web add @nhost/nhost-js +``` + +Then we'll have to make an account, an application, and get its `backendUrl`. +On your application's dashboard, click "Settings" at the bottom of the nav on the left, then "Environment Variables", and look for "NHOST_BACKEND_URL". +Copy its value into your project's `.env` file and include it in the list of env vars the web side has access to in your project's `redwood.toml` file: + +```bash title=".env" +NHOST_BACKEND_URL="..." +``` + +```toml title="redwood.toml" +[web] + # ... + includeEnvironmentVariables = ["NHOST_BACKEND_URL"] +``` + +Lastly, let's update `web/src/auth.ts`: + +```ts title="web/src/auth.ts" +import { createAuthentication } from '@redwoodjs/auth' + +import { NhostClient } from '@nhost/nhost-js' + +// ... + +const client = new NhostClient({ + backendUrl: process.env.NHOST_BACKEND_URL +}) +``` + +Ok, that's it for the client. +At this point, you could update some of the TS interfaces, but we'll leave that to you and press on with the integration. +Now we have to create the `useAuth` hook using the client we just made so that the rest of Redwood, like the router, works. +Scroll down a little more to the `createAuthImplementation` function: + +```ts title="web/src/auth.ts" +// This is where most of the integration work will take place. You should keep +// the shape of this object (i.e.
keep all the key names) but change all the +// values/functions to use methods from the auth service provider client sdk +// you're integrating with +function createAuthImplementation(client: AuthClient) { + return { + type: 'custom-auth', + client, + login: async () => client.login(), + logout: async () => client.logout(), + signup: async () => client.signup(), + getToken: async () => client.getToken(), + /** + * Actual user metadata might look something like this + * { + * "id": "11111111-2222-3333-4444-5555555555555", + * "aud": "authenticated", + * "role": "authenticated", + * "roles": ["admin"], + * "email": "email@example.com", + * "app_metadata": { + * "provider": "email" + * }, + * "user_metadata": null, + * "created_at": "2016-05-15T19:53:12.368652374-07:00", + * "updated_at": "2016-05-15T19:53:12.368652374-07:00" + * } + */ + getUserMetadata: async () => client.getUserMetadata(), + } +} +``` + +This may seem like a lot, but it's actually not so bad: it's just about mapping the client's functions to these properties, many of which are pretty straightforward. +The fact that this is eventually the `useAuth` hook is hidden a bit—`createAuthImplementation` gets passed to `createAuthentication`, which returns the `AuthProvider` component and `useAuth` hook—but you don't have to concern yourself with that here. + +Again, let's take all the reading and trial and error you'll have to do for granted, though it may be long and tedious: + +```ts title="web/src/auth.ts" +function createAuthImplementation(client: AuthClient) { + return { + type: 'custom-auth', + client, + // See sign in options at https://docs.nhost.io/reference/javascript/auth/sign-in + login: async (options) => { + return await client.auth.signIn(options) + }, + // See sign out options at https://docs.nhost.io/reference/javascript/auth/sign-out + logout: async (options) => { + return await client.auth.signOut(options) + }, + // See sign up options at https://docs.nhost.io/reference/javascript/auth/sign-up + signup: async (options) => { + return await client.auth.signUp(options) + }, + getToken: async () => { + return (await client.auth.getJWTToken()) || null + }, + // See https://docs.nhost.io/reference/javascript/auth/get-user + getUserMetadata: async () => { + return await client.auth.getUser() + }, + restoreAuthState: async () => { + return await client.auth.refreshSession() + }, + } +} +``` + +That's it for the web side. +Let's head over to the api side. + +## api side + +Now that we've set up the web side, every GraphQL request includes a token. +But without a way to verify and decode that token, the api side doesn't know what to do with it, so let's start there. + +In `api/src/lib/auth.ts`, make an empty function, `authDecoder`. +Eventually we'll pass this to the `createGraphQLHandler` function in `api/src/graphql.ts`. +The GraphQL server calls it with two arguments, the token and the type. Both are strings: + +```ts title="api/src/lib/auth.ts" +export const authDecoder = async (token: string, type: string) => { + // decode token... +} +``` + +First, let's make sure that the type is the same as the type in `createAuthImplementation`, `'custom-auth'`. If it's not, we can call it quits: + +```ts title="api/src/lib/auth.ts" +export const authDecoder = async (token: string, type: string) => { + if (type !== 'custom-auth') { + return null + } + + // decode token... +} +``` + +Now let's verify and decode the token. 
+We'll use the npm module [jose](https://www.npmjs.com/package/jose) to do that; it has a `jwtVerify` function that does exactly what we want. +Go ahead and add it: + +``` +yarn workspace api add jose +``` + +For `jwtVerify` to do its job, it needs the secret. +Time for another trip to your Nhost application's dashboard. +This time you're looking for "NHOST_JWT_SECRET". +Just like "NHOST_BACKEND_URL", it should be in "Settings", "Environment Variables". +(This one is a JSON object, with two properties, `type` and `key`. We just need `key`.) +Add that one to your project's `.env` file (no need to put it in `redwood.toml` though): + +```shell title=".env" +NHOST_JWT_SECRET="..." +``` + +Now we can use it in the `authDecoder`: + +```ts title="api/src/lib/auth.ts" +import { jwtVerify } from 'jose' + +export const authDecoder = async (token: string, type: string) => { + if (type !== 'custom-auth') { + return null + } + + const secret = new TextEncoder().encode(process.env.NHOST_JWT_SECRET) + + const decoded = await jwtVerify(token, secret) + + return decoded +} +``` + +Great—now we've got a way of decoding the token in requests coming from the web side. +Just one more important step that's easy to overlook: we have to pass this function to `createGraphQLHandler` in `api/src/functions/graphql.ts`: + +```ts title="api/src/functions/graphql.ts" +// highlight-next-line +import { authDecoder, getCurrentUser } from 'src/lib/auth' + +// ... + +export const handler = createGraphQLHandler({ + // highlight-next-line + authDecoder, + getCurrentUser, + // ... +}) +``` + +That should be enough; now, things should just work. +Let's make sure: if this is a brand new project, generate a home page. +There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: + +```tsx title="web/src/pages/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc. */} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={() => signUp({ + // email: 'your.email@email.com', + // password: 'super secret password', + })}>sign up</button> + </> + ) +} + +export default HomePage +``` + +Nhost doesn't redirect to a hosted sign-up page or open a sign-up modal. +In a real app, you'd build a form here, but we're going to hardcode an email and password. +One thing you may want to do before signing up: disable email verification, otherwise you'll actually have to verify your email. +Go back to "Settings" in your Nhost application, but this time click "Sign in methods". +There should be a checkbox there, "Require Verified Emails". +Toggle it off. +Now try signing up and you should see `{"isAuthenticated":true}` on the page.
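+
+One detail that's easy to miss when you move on to `getCurrentUser`: because our `authDecoder` returns the whole object that `jwtVerify` resolves to, the decoded claims live on its `payload` property. Here's a minimal sketch of what `getCurrentUser` could look like with that in mind; the claim names are illustrative only, so inspect a real token from your provider to see what's actually in there:
+
+```ts title="api/src/lib/auth.ts"
+import type { JWTVerifyResult } from 'jose'
+
+export const getCurrentUser = async (decoded: JWTVerifyResult | null) => {
+  if (!decoded) {
+    return null
+  }
+
+  // `jwtVerify` resolves to `{ payload, protectedHeader }`, so the claims
+  // are on `payload`. `sub` is a standard claim; `roles` is just an example.
+  const { payload } = decoded
+
+  return {
+    id: payload.sub,
+    roles: (payload.roles as string[] | undefined) ?? [],
+  }
+}
+```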
diff --git a/docs/versioned_docs/version-7.0/auth/dbauth.md b/docs/versioned_docs/version-7.0/auth/dbauth.md new file mode 100644 index 000000000000..7bb395831159 --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/dbauth.md @@ -0,0 +1,707 @@ +--- +sidebar_label: Self-hosted (dbAuth) +--- + +# Self-hosted Authentication (dbAuth) + +Redwood's own **dbAuth** provides several benefits: + +- Use your own database for storing user credentials +- Use your own login, signup and forgot password pages (or use Redwood's pre-built ones) +- Customize login session length +- No external dependencies +- No user data ever leaves your servers +- No additional charges/limits based on number of users +- No third party service outages affecting your site + +And potentially one large drawback: + +- Use your own database for storing user credentials + +However, we're following best practices for storing these credentials: + +1. Users' passwords are [salted and hashed](https://auth0.com/blog/adding-salt-to-hashing-a-better-way-to-store-passwords/) with PBKDF2 before being stored +2. Plaintext passwords are never stored anywhere, and only transferred between client and server during the login/signup phase (and hopefully only over HTTPS) +3. Our logger scrubs sensitive parameters (like `password`) before they are output +4. We only store the hashes of reset tokens + +Even if you later decide you want to let someone else handle your user data for you, dbAuth is a great option for getting up and running quickly (we even have a generator for creating basic login and signup pages for you). + +## How It Works + +dbAuth relies on good ol' fashioned cookies to determine whether a user is logged in or not. On an attempted login, a serverless function on the api-side checks whether a user exists with the given username (internally, dbAuth refers to this field as _username_ but you can use anything you want, like an email address). If a user with that username is found, does their salted and hashed password match the one in the database? + +If so, an [HttpOnly](https://owasp.org/www-community/HttpOnly), [Secure](https://owasp.org/www-community/controls/SecureCookieAttribute), [SameSite](https://owasp.org/www-community/SameSite) cookie (dbAuth calls this the "session cookie") is sent back to the browser containing the ID of the user. The content of the cookie is a simple string, but AES encrypted with a secret key (more on that later). + +When the user makes a GraphQL call, we decrypt the cookie and make sure that the user ID contained within still exists in the database. If so, the request is allowed to proceed. + +If there are any shenanigans detected (the cookie can't be decrypted properly, or the user ID found in the cookie does not exist in the database) the user is immediately logged out by expiring the session cookie. + +## Setup + +A single CLI command will get you everything you need to get dbAuth working, minus the actual login/signup pages: + +```bash +yarn rw setup auth dbAuth +``` + +You will be prompted to ask if you want to enable **WebAuthn** support. WebAuthn is an open standard for allowing authentication from devices like TouchID, FaceID, USB fingerprint scanners, and more. If you think you want to use WebAuthn, enter `y` at this prompt and read on configuration options. + +You can also add WebAuthn to an existing dbAuth install. [Read more about WebAuthn usage and config below](#webauthn). 
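+
+The setup command's post-install instructions (reproduced below) will have you add `hashedPassword` and `salt` columns to your user model, so it helps to picture how those two values get used. The following is only a conceptual sketch of the check described in "How It Works" above, not dbAuth's real implementation; the iteration count, key length, and digest are illustrative:
+
+```javascript
+import { pbkdf2Sync, randomBytes } from 'node:crypto'
+
+// Illustrative parameters only; dbAuth picks and manages these for you.
+const hashPassword = (password, salt) =>
+  pbkdf2Sync(password, salt, 100000, 32, 'sha256').toString('hex')
+
+// At signup: generate a salt and store it alongside the derived hash
+const salt = randomBytes(16).toString('hex')
+const hashedPassword = hashPassword('correct horse battery staple', salt)
+
+// At login: re-hash the submitted password with the stored salt and compare
+const passwordMatches = (submittedPassword, user) =>
+  hashPassword(submittedPassword, user.salt) === user.hashedPassword
+```
+
+dbAuth takes care of all of this for you; the takeaway is just that only the salt and the derived hash ever touch the database.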
+ +Read the post-install instructions carefully as they contain instructions for adding database fields for the hashed password and salt, as well as how to configure the auth serverless function based on the name of the table that stores your user data. Here they are, but could change in future releases (these do not include the additional WebAuthn required options, make sure you get those from the output of the `setup` command): + +> You will need to add a couple of fields to your User table in order to store a hashed password and salt: +> +> ``` +> model User { +> id Int @id @default(autoincrement()) +> email String @unique +> hashedPassword String // <─┐ +> salt String // <─┼─ add these lines +> resetToken String? // <─┤ +> resetTokenExpiresAt DateTime? // <─┘ +> } +> ``` +> +> If you already have existing user records you will need to provide a default value or Prisma complains, so change those to: +> +> ``` +> hashedPassword String @default("") +> salt String @default("") +> ``` +> +> You'll need to let Redwood know what field you're using for your users' `id` and `username` fields In this case we're using `id` and `email`, so update those in the `authFields` config in `/api/src/functions/auth.js` (this is also the place to tell Redwood if you used a different name for the `hashedPassword` or `salt` fields): +> +> ``` +> authFields: { +> id: 'id', +> username: 'email', +> hashedPassword: 'hashedPassword', +> salt: 'salt', +> resetToken: 'resetToken', +> resetTokenExpiresAt: 'resetTokenExpiresAt', +> }, +> ``` +> +> To get the actual user that's logged in, take a look at `getCurrentUser()` in `/api/src/lib/auth.js`. We default it to something simple, but you may use different names for your model or unique ID fields, in which case you need to update those calls (instructions are in the comment above the code). +> +> Finally, we created a `SESSION_SECRET` environment variable for you in `.env`. This value should NOT be checked into version control and should be unique for each environment you deploy to. If you ever need to log everyone out of your app at once change this secret to a new value. To create a new secret, run: +> +> ``` +> yarn rw g secret +> ``` +> +> Need simple Login, Signup and Forgot Password pages? Of course we have a generator for those: +> +> ``` +> yarn rw generate dbAuth +> ``` + +Note that if you change the fields named `hashedPassword` and `salt`, and you have some verbose logging in your app, you'll want to scrub those fields from appearing in your logs. See the [Redaction](logger.md#redaction) docs for info. + +## Scaffolding Login/Signup/Forgot Password Pages + +If you don't want to create your own login, signup and forgot password pages from scratch we've got a generator for that: + +```bash +yarn rw g dbAuth +``` + +Once again you will be asked if you want to create a WebAuthn-enabled version of the LoginPage. If so, enter `y` and follow the setup instructions. + +The default routes will make them available at `/login`, `/signup`, `/forgot-password`, and `/reset-password` but that's easy enough to change. Again, check the post-install instructions for one change you need to make to those pages: where to redirect the user to once their login/signup is successful. + +If you'd rather create your own, you might want to start from the generated pages anyway as they'll contain the other code you need to actually submit the login credentials or signup fields to the server for processing. 
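+
+For reference, the routes the generator adds look something like the following sketch (route and page names may differ slightly depending on your Redwood version, so check your own `Routes` file):
+
+```jsx title="web/src/Routes.jsx"
+import { Router, Route } from '@redwoodjs/router'
+
+import { useAuth } from 'src/auth'
+
+const Routes = () => {
+  return (
+    <Router useAuth={useAuth}>
+      {/* Pages are auto-imported by Redwood, so no explicit imports are needed */}
+      <Route path="/login" page={LoginPage} name="login" />
+      <Route path="/signup" page={SignupPage} name="signup" />
+      <Route path="/forgot-password" page={ForgotPasswordPage} name="forgotPassword" />
+      <Route path="/reset-password" page={ResetPasswordPage} name="resetPassword" />
+      {/* ...the rest of your routes */}
+      <Route notfound page={NotFoundPage} />
+    </Router>
+  )
+}
+
+export default Routes
+```
+
+Changing the paths here is all it takes to move the pages somewhere else.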
+ +## Configuration + +Almost all config for dbAuth lives in `api/src/functions/auth.js` in the object you give to the `DbAuthHandler` initialization. The comments above each key will explain what goes where. Here's an overview of the more important options: + +### allowedUserFields + +```javascript +allowedUserFields: ["id", "email"] +``` + +Most of the auth handlers accept a `user` argument that you can reference in the body of the function. These handlers also sometimes return that `user` object. As a security measure, `allowedUserFields` defines the only properties that will be available in that object so that sensitive data isn't accidentally leaked by these handlers to the client. + +:::info + +The `signup` and `forgotPassword` handlers return to the client whatever data is returned from their handlers, which can be used to display something like the email address that a verification email was just sent to. Without `allowedUserFields` it would be very easy to include the user's `hashedPassword` and `salt` in that response (just return `user` from those handlers) and then any customer could open the Web Inspector in their browser and see those values in plain text! + +::: + +`allowedUserFields` is defaulted to `id` and `email` but you can add any property on `user` to that list. + +### login.enabled + +Allow users to call login. Defaults to true. Needs to be explicitly set to false to disable the flow. + +```javascript +login: { + enabled: false +} +``` + +### login.handler() + +If you want to do something other than immediately let a user log in if their username/password is correct, you can add additional logic in `login.handler()`. For example, if a user's credentials are correct, but they haven't verified their email address yet, you can throw an error in this function with the appropriate message and then display it to the user. If the login should proceed, simply return the user that was passed as the only argument to the function: + +```javascript +login: { + handler: (user) => { + if (!user.verified) { + throw new Error('Please validate your email first!') + } else { + return user + } + } +} +``` + +### signup.enabled + +Allow users to sign up. Defaults to true. Needs to be explicitly set to false to disable the flow. + +```javascript +signup: { + enabled: false +} +``` + +### signup.handler() + +This function should contain the code needed to actually create a user in your database. You will receive a single argument which is an object with all of the fields necessary to create the user (`username`, `hashedPassword` and `salt`) as well as any additional fields you included in your signup form in an object called `userAttributes`: + +```javascript +signup: { + handler: ({ username, hashedPassword, salt, userAttributes }) => { + return db.user.create({ + data: { + email: username, + hashedPassword: hashedPassword, + salt: salt, + name: userAttributes.name, + }, + }) + } +} +``` + +Before `signup.handler()` is invoked, dbAuth will check that the username is unique in the database and throw an error if not. + +There are three things you can do within this function depending on how you want the signup to proceed: + +1. If everything is good and the user should be logged in after signup: return the user you just created +2. If the user is safe to create, but you do not want to log them in automatically: return a string, which will be returned by the `signUp()` function you called after destructuring it from `useAuth()` (see code snippet below) +3. 
If the user should _not_ be able to sign up for whatever reason: throw an error in this function with the message to be displayed + +You can deal with case #2 by doing something like the following in a signup component/page: + +```jsx +const { signUp } = useAuth() + +const onSubmit = async (data) => { + const response = await signUp({ ...data }) + + if (response.message) { + toast.error(response.message) // user created, but not logged in + } else { + toast.success('Welcome!') // user created and logged in + navigate(routes.dashboard()) + } +} +``` + +### signup.passwordValidation() + +This function is used to validate that the password supplied at signup meets certain criteria (length, randomness, etc.). By default it just returns `true` which means the password is always considered valid, even if only a single character (dbAuth features built-in validation that the password is not blank, an empty string, or made up of only spaces). Modify it to enforce whatever methodology you want on the password. + +If the password is valid, return `true`. Otherwise, throw the `PasswordValidationError` along with a (optional) message explaining why: + +```javascript +signup: { + passwordValidation: (password) => { + + if (password.length < 8) { + throw new PasswordValidationError('Password must be at least 8 characters') + } + + if (!password.match(/[A-Z]/)) { + throw new PasswordValidationError('Password must contain at least one capital letter') + } + + return true + } +} +``` + +For the best user experience you should include the same checks on the client side and avoid the roundtrip to the server altogether if the password is invalid. However, having the checks here makes sure that someone can't submit a user signup programmatically and skirt your password requirements. + +### forgotPassword.enabled + +Allow users to request a new password via a call to `forgotPassword`. Defaults to true. Needs to be explicitly set to false to disable the flow. +When disabling this flow you probably want to disable `resetPassword` as well. + +```javascript +forgotPassword: { + enabled: false +} +``` + +### forgotPassword.handler() + +This handler is invoked if a user is found with the username/email that they submitted on the Forgot Password page, and that user will be passed as an argument. Inside this function is where you'll send the user a link to reset their password—via an email is most common. The link will, by default, look like: + +``` +https://example.com/reset-password?resetToken=${user.resetToken} +``` + +If you changed the path to the Reset Password page in your routes you'll need to change it here. If you used another name for the `resetToken` database field, you'll need to change that here as well: + +``` +https://example.com/reset-password?resetKey=${user.resetKey} +``` + +> Note that although the user table contains a hash of `resetToken`, only for the handler, `user.resetToken` will contain the raw `resetToken` to use for generating a password reset link. + +### resetPassword.enabled + +Allow users to reset their password via a code from a call to `forgotPassword`. Defaults to true. Needs to be explicitly set to false to disable the flow. +When disabling this flow you probably want to disable `forgotPassword` as well. + +```javascript +resetPassword: { + enabled: false +} +``` + +### resetPassword.handler() + +This handler is invoked after the password has been successfully changed in the database. 
Returning something truthy (like `return user`) will automatically log the user in after their password is changed. If you'd like to return them to the login page and make them log in manually, `return false` and redirect the user in the Reset Password page. + +### usernameMatch + +This configuration allows you to perform a case insensitive check on a username at the point of db check. You will need to provide the configuration of your choice for both signup and login. + +```javascript +signup: { + usernameMatch: 'insensitive' +} +``` + +```javascript +login: { + usernameMatch: 'insensitive' +} +``` + +By default no setting is required. This is because each db has its own rules for enabling this feature. To enable please see the table below and pick the correct 'userMatchString' for your db of choice. + +| DB | Default | usernameMatchString | notes | +|---|---|---|---| +| Postgres | 'default' | 'insensitive' | | +| MySQL | 'case-insensitive' | N/A | turned on by default so no setting required | +| MongoDB | 'default' | 'insensitive' | +| SQLite | N/A | N/A | [Not Supported] Insensitive checks can only be defined at a per column level | +| Microsoft SQL Server | 'case-insensitive' | N/A | turned on by default so no setting required | + +### Cookie config + +These options determine how the cookie that tracks whether the client is authorized is stored in the browser. The default configuration should work for most use cases. If you serve your web and api sides from different domains you'll need to make some changes: set `SameSite` to `None` and then add [CORS configuration](#cors-config). + +```javascript +cookie: { + HttpOnly: true, + Path: '/', + SameSite: 'Strict', + Secure: true, + // Domain: 'example.com', +} +``` + +### CORS config + +If you're using dbAuth and your api and web sides are deployed to different domains then you'll need to configure CORS for both GraphQL in general and dbAuth. You'll also need to enable a couple of options to be sure and send/accept credentials in XHR requests. For more info, see the complete [CORS doc](cors.md#cors-and-authentication). + +### Error Messages + +There are several error messages that can be displayed, including: + +- Username/email not found +- Incorrect password +- Expired reset password token + +We've got some default error messages that sound nice, but may not fit the tone of your site. You can customize these error messages in `api/src/functions/auth.js` in the `errors` prop of each of the `login`, `signup`, `forgotPassword` and `resetPassword` config objects. The generated file contains tons of comments explaining when each particular error message may be shown. + +### WebAuthn Config + +See [WebAuthn Configuration](#function-config) section below. + +## Environment Variables + +### Cookie Domain + +By default, the session cookie will not have the `Domain` property set, which a browser will default to be the [current domain only](https://developer.mozilla.org/en-US/docs/Web/HTTP/Cookies#define_where_cookies_are_sent). If your site is spread across multiple domains (for example, your site is at `example.com` but your api-side is deployed to `api.example.com`) you'll need to explicitly set a Domain so that the cookie is accessible to both. + +To do this, set the `cookie.Domain` property in your `api/src/functions/auth.js` configuration, set to the root domain of your site, which will allow it to be read by all subdomains as well. 
For example: + +```json title="api/src/functions/auth.js" +cookie: { + HttpOnly: true, + Path: '/', + SameSite: 'Strict', + Secure: process.env.NODE_ENV !== 'development' ? true : false, + Domain: 'example.com' +} +``` + +### Session Secret Key + +If you need to change the secret key that's used to encrypt the session cookie, or deploy to a new target (each deploy environment should have its own unique secret key) we've got a CLI tool for creating a new one: + +``` +yarn rw g secret +``` + +Note that the secret that's output is _not_ appended to your `.env` file or anything else, it's merely output to the screen. You'll need to put it in the right place after that. + +:::warning .env and Version Control + +The `.env` file is set to be ignored by git and not committed to version control. There is another file, `.env.defaults`, which is meant to be safe to commit and contain simple ENV vars that your dev team can share. The encryption key for the session cookie is NOT one of these shareable vars! + +::: + +## WebAuthn + +[WebAuthn](https://webauthn.guide/) is a specification written by the W3C and FIDO with participation from Google, Mozilla, Microsoft, and others. It defines a standard way to use public key cryptography instead of a password to authenticate users. + +That's a very technical way of saying: users can log in with [TouchID](https://en.wikipedia.org/wiki/Touch_ID), [FaceID](https://en.wikipedia.org/wiki/Face_ID), [Windows Hello](https://support.microsoft.com/en-us/windows/learn-about-windows-hello-and-set-it-up-dae28983-8242-bb2a-d3d1-87c9d265a5f0), [Yubikey](https://www.yubico.com/), and more. + +<img width="401" alt="image" src="https://user-images.githubusercontent.com/300/174893269-2cbb1008-ab84-4121-80ee-cfa9250ba1bd.png" /> + +We'll refer to whatever biometric device that's used as simply a "device" below. The WebAuthn flow includes two "phases": + +1. **Registration**: the first time a new device is added for a user (a user can have multiple devices registered) +2. **Authentication**: the device is recognized and can be used to login on subsequent visits + +### User Experience + +The `LoginPage` generated by Redwood includes two new prompts on the login page, depending on the state of the user and whether they have registered their device yet or not: + +**Registration** + +The user is prompted to login with username/password: + +<img width="417" alt="image" src="https://user-images.githubusercontent.com/300/174903338-84ae504c-2e8c-444c-83aa-2cf60320c21e.png" /> + +Then asked if they want to enable WebAuthn: + +<img width="405" alt="image" src="https://user-images.githubusercontent.com/300/174903419-7a73fa35-c732-48c1-a8f9-6bfa801437e0.png" /> + +If so, they are shown the browser's prompt to scan: + +<img width="362" alt="image" src="https://user-images.githubusercontent.com/300/174903492-deae26db-232e-4712-a81b-4b703be12a4b.png" /> + +If they skip, they just proceed into the site as usual. If they log out and back in, they will be prompted to enable WebAuthn again. + +**Authentication** + +When a device is already registered then it can be used to skip username/password login. 
The user is immediately shown the prompt to scan when they land on the login page (if the prompt doesn't show, or they mistakenly cancel it, they can click "Open Authenticator" to show the prompt again). + +<img width="701" alt="image" src="https://user-images.githubusercontent.com/300/174904236-ccf6eba4-35ce-46e7-ad04-42eee43d3bba.png" /> + +They can also choose to use username/password credentials instead of their registered device. + +### How it Works + +The back and forth between the web and api sides works like this: + +**Registration** + +1. If the user chooses to enable their device, a request is made to the server for "registration options" which is a JSON object containing details about the server and user (domain, username). +2. Your app receives that data and then makes a browser API call that says to start the biometric reader with the received options +3. The user scans their fingerprint/face and the browser API returns an ID representing this device, a public key and a few other fields for validation on the server +4. The ID, public key, and additional details are sent to the server to be verified. Assuming they are, the device is saved to the database in a `UserCredential` table (you can change the name if you want). The server responds by placing a cookie on the user's browser with the device ID (a random string of letters and numbers) + +A similar process takes place when authenticating: + +**Authentication** + +1. If the cookie from the previous process is present, the web side knows that the user has a registered device so a request is made to the server to get "authentication options" +2. The server looks up the user whose credential ID is in the cookie and gets a list of all of the devices they have registered in the past. This is included along with the domain and username +3. The web side receives the options from the server and a browser API call is made. The browser first checks to see if the list of devices from the server includes the current device. If so, it prompts the user to scan their fingerprint/face (if the device is not in the list, the user will be directed back to username/password signup) +4. The ID, public key, user details and a signature are sent to the server and checked to make sure the signature contains the expected data encrypted with the public key. If so, the regular login cookie is set (the same as if the user had used username/password login) + +In both cases, actual scanning and matching of devices is handled by the operating system: all we care about is that we are given a credential ID and a public key back from the device. + +### Browser Support + +WebAuthn is supported in the following browsers (as of July 2022): + +| OS | Browser | Authenticator | +| ------- | ------- | ------------- | +| macOS | Firefox | Yubikey Security Key NFC (USB), Yubikey 5Ci, SoloKey | +| macOS | Chrome | Touch ID, Yubikey Security Key NFC (USB), Yubikey 5Ci, SoloKey | +| iOS | All | Face ID, Touch ID, Yubikey Security Key NFC (NFC), Yubikey 5Ci | +| Android | Chrome | Fingerprint Scanner, caBLE | +| Android | Firefox | Screen PIN | + +### Configuration + +WebAuthn support requires a few updates to your codebase: + +1. Adding a `UserCredential` model +2. Adding configuration options in `api/src/functions/auth.js` +3. Adding a `client` to the `<AuthProvider>` in `App.js` +4.
Adding an interface during the login process that prompts the user to enable their device + +:::info +If you setup dbAuth and generated the LoginPage with WebAuthn support then all of these steps have already been done for you! As described in the post-setup instructions you just need to add the required fields to your `User` model, create a `UserCredential` model, and you're ready to go! + +If you didn't setup WebAuthn at first, but decided you now want WebAuthn, you could run the setup and generator commands again with the `--force` flag to overwrite your existing files. Any changes you made will be overwritten, but if you do a quick diff in git you should be able to port over most of your changes. +::: + +### Schema Updates + +You'll need to add two fields to your `User` model, and a new `UserCredential` model to store the devices that are used and associate them with a user: + +```javascript title="api/db/schema.prisma" +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + binaryTargets = "native" +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + hashedPassword String + salt String + resetToken String? + resetTokenExpiresAt DateTime? + // highlight-start + webAuthnChallenge String? @unique + credentials UserCredential[] + // highlight-end +} + +// highlight-start +model UserCredential { + id String @id + userId Int + user User @relation(fields: [userId], references: [id]) + publicKey Bytes + transports String? + counter BigInt +} +// highlight-end +``` + +Run `yarn rw prisma migrate dev` to apply the changes to your database. + +:::warning Do Not Allow GraphQL Access to `UserCredential` + +As you can probably tell by the name, this new model contains secret credential info for the user. You **should not** make this data publicly available by adding an SDL file to `api/src/graphql`. + +Also: if you (re)generate the SDL for your `User` model, the generator will happily include the `credentials` relationship, assuming you want to allow access to that data (it does this automatically for all relaionships). This will result in an error and warning message in the console from the API server when it tries to read the new SDL file: the `User` SDL refers to a `UserCredential` type, which does not exist (there's no `userCredential.sdl.js` file to define it). + +If you see this notice after (re)generating, simply remove the following line from the `User` SDL: + +``` +credentials: [UserCredential]! +``` + +::: + +### Function Config + +Next we need to let dbAuth know about the new field and model names, as well as how you want WebAuthn to behave (see the highlighted section) + +```javascript title="api/src/functions/auth.js" +import { db } from 'src/lib/db' +import { DbAuthHandler } from '@redwoodjs/api' + +export const handler = async (event, context) => { + + // assorted handler config here... + + const authHandler = new DbAuthHandler(event, context, { + db: db, + authModelAccessor: 'user', + // highlight-start + credentialModelAccessor: 'userCredential', + // highlight-end + authFields: { + id: 'id', + username: 'email', + hashedPassword: 'hashedPassword', + salt: 'salt', + resetToken: 'resetToken', + resetTokenExpiresAt: 'resetTokenExpiresAt', + // highlight-start + challenge: 'webAuthnChallenge', + // highlight-end + }, + + cookie: { + HttpOnly: true, + Path: '/', + SameSite: 'Strict', + Secure: process.env.NODE_ENV !== 'development' ? 
true : false, + }, + + forgotPassword: forgotPasswordOptions, + login: loginOptions, + resetPassword: resetPasswordOptions, + signup: signupOptions, + + // highlight-start + webAuthn: { + enabled: true, + expires: 60 * 60 * 14, + name: 'Webauthn Test', + domain: + process.env.NODE_ENV === 'development' ? 'localhost' : 'server.com', + origin: + process.env.NODE_ENV === 'development' + ? 'http://localhost:8910' + : 'https://server.com', + type: 'platform', + timeout: 60000, + credentialFields: { + id: 'id', + userId: 'userId', + publicKey: 'publicKey', + transports: 'transports', + counter: 'counter', + }, + }, + // highlight-end + }) + + return await authHandler.invoke() +} +``` + +- `credentialModelAccessor` specifies the name of the accessor that you call to access the model you created to store credentials. If your model name is `UserCredential` then this field would be `userCredential` as that's how Prisma's naming conventions work. +- `authFields.challenge` specifies the name of the field in the user model that will hold the WebAuthn challenge string. This string is generated automatically whenever a WebAuthn registration or authentication request starts and is one more verification that the browser request came from this user. A user can only have one WebAuthn request/response cycle going at a time, meaning that they can't open a desktop browser, get the TouchID prompt, then switch to iOS Safari to use FaceID, then return to the desktop to scan their fingerprint. The most recent WebAuthn request will clobber any previous one that's in progress. +- `webAuthn.enabled` is a boolean, denoting whether the server should respond to webAuthn requests. If you decide to stop using WebAuthn, you'll want to turn it off here as well as update the LoginPage to stop prompting. +- `webAuthn.expires` is the number of seconds that a user will be allowed to keep using their fingerprint/face scan to re-authenticate into your site. Once this value expires, the user _must_ use their username/password to authenticate the next time, and then WebAuthn will be re-enabled (again, for this length of time). For security, you may want to log users out of your app after an hour of inactivity, but allow them to easily use their fingerprint/face to re-authenticate for the next two weeks (this is similar to login on macOS where your TouchID session expires after a couple of days of inactivity). In this example you would set `login.expires` to `60 * 60` and `webAuthn.expires` to `60 * 60 * 24 * 14`. +- `webAuthn.name` is the name of the app that will show in some browser's prompts to use the device +- `webAuthn.domain` is the name of domain making the request. This is just the domain part of the URL, ex: `app.server.com`, or in development mode `localhost` +- `webAuthn.origin` is the domain _including_ the protocol and port that the request is coming from, ex: [https://app.server.com](https://app.server.com) In development mode, this would be `http://localhost:8910` +- `webAuthn.type`: the type of device that's allowed to be used (see [next section below](#webauthn-type-option)) +- `webAuthn.timeout`: how long to wait for a device to be used in milliseconds (defaults to 60 seconds) +- `webAuthn.credentialFields`: lists the expected field names that dbAuth uses internally mapped to what they're actually called in your model. This includes 5 fields total: `id`, `userId`, `publicKey`, `transports`, `counter`. 
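+
+For example, to get the behavior described for `webAuthn.expires` above (a one-hour login session, but two weeks of fingerprint/face re-authentication), the relevant pieces of the config would look something like this:
+
+```javascript title="api/src/functions/auth.js"
+const loginOptions = {
+  // ...the rest of your login config
+  // require a full username/password login after an hour
+  expires: 60 * 60,
+}
+
+const authHandler = new DbAuthHandler(event, context, {
+  // ...the rest of your handler config
+  webAuthn: {
+    enabled: true,
+    // allow fingerprint/face re-authentication for up to two weeks
+    expires: 60 * 60 * 24 * 14,
+    // ...the rest of your webAuthn config
+  },
+})
+```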
+ +### WebAuthn `type` Option + +The config option `webAuthn.type` can be set to `any`, `platform` or `cross-platform`: + +- `platform` means to _only_ allow embedded devices (TouchID, FaceID, Windows Hello) to be used +- `cross-platform` means to _only_ allow third party devices (like a Yubikey USB fingerprint reader) +- `any` means to allow both platform and cross-platform devices + +In some browsers this can lead to a pretty drastic UX difference. For example, here is the interface in Chrome on macOS with the included TouchID sensor on a Macbook Pro: + +#### **any** + +<img width="446" alt="image" src="https://user-images.githubusercontent.com/300/174896660-c2960921-046c-49ad-8ff0-38c019569371.png" /> + +If you pick "Add a new Android Phone" you're presented with a QR code: + +<img width="445" alt="image" src="https://user-images.githubusercontent.com/300/174896265-bb513c82-56a7-4bbc-892e-97aa8a57f525.png" /> + +If you pick "USB Security Key" you're given the chance to scan your fingerprint in a 3rd party USB device: + +<img width="445" alt="image" src="https://user-images.githubusercontent.com/300/174896250-a0c447e7-c238-47bb-ab14-86b63385178e.png" /> + +And finally if you pick "This device" you're presented with the standard interface you'd get if used `platform` as your type: + +<img width="251" alt="image" src="https://user-images.githubusercontent.com/300/174895503-de913272-f219-4d28-9e86-ac6190785dfd.png" /> + +You'll have to decide if this UX tradeoff is worth it for your customers, as it can be pretty confusing when first presented with all of these options when someone is just used to using TouchID or FaceID. + +#### **platform** + +The `platform` option provides the simplest UI and one that users with a TouchID or FaceID will be immediately familiar with: + +<img width="251" alt="image" src="https://user-images.githubusercontent.com/300/174895503-de913272-f219-4d28-9e86-ac6190785dfd.png" /> + +Note that you can also fallback to use your user account password (on the computer itself) in addition to TouchID: + +<img width="251" alt="image" src="https://user-images.githubusercontent.com/300/174895743-24042578-4461-4c3b-b51c-8abc0325f065.png" /> + +Both the password and TouchID scan will count as the same device, so users can alternate between them if they want. + +#### **cross-platform** + +This interface is the same as `any`, but without the option to pick "This device": + +<img width="445" src="https://user-images.githubusercontent.com/300/174896275-066b163b-8203-4287-9e3f-ba545552dd22.png" /> + +So while the `any` option is the most flexible, it's also the most confusing to users. If you do plan on allowing any device, you may want to do a user-agent check and try to explain to users what the different options actually mean. + +The api-side is now ready to go. + +### App.js Updates + +If you generated your login/signup pages with `yarn rw g dbAuth --webauthn` then all of these changes are in place and you can start using WebAuthn right away! Otherwise, read on. 
+ +First you'll need to import the `WebAuthnClient` and give it to the `<AuthProvider>` component: + +```jsx title="web/src/App.js" +import { AuthProvider } from '@redwoodjs/auth' +// highlight-start +import WebAuthnClient from '@redwoodjs/auth-dbauth-web/webAuthn' +// highlight-end +import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' +import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' + +import FatalErrorPage from 'src/pages/FatalErrorPage' +import Routes from 'src/Routes' + +import './scaffold.css' +import './index.css' + +const App = () => ( + <FatalErrorBoundary page={FatalErrorPage}> + <RedwoodProvider titleTemplate="%PageTitle | %AppTitle"> + // highlight-start + <AuthProvider type="dbAuth" client={WebAuthnClient}> + // highlight-end + <RedwoodApolloProvider> + <Routes /> + </RedwoodApolloProvider> + </AuthProvider> + </RedwoodProvider> + </FatalErrorBoundary> +) + +export default App +``` + +Now you're ready to access the functionality added by the WebAuthn client. The easiest way to do this would be to generate a new `LoginPage` with `yarn rw g dbAuth --webauthn`, even if it's in a brand new, throwaway app, and copy the pieces you need (or just replace your existing login page with it). + +The gist of building a login flow is that you now need to stop after username/password authentication and, if the browser supports WebAuthn, give the user the chance to register their device. If they come to the login page and already have the `webAuthn` cookie then you can show the prompt to authenticate, skipping the username/password form completely. This is all handled in the LoginPage template that Redwood generates for you. + +### WebAuthn Client API + +The `client` that we gave to the `AuthProvider` can be destructured from `useAuth()`: + +```javascript +const { isAuthenticated, client, logIn } = useAuth() +``` + +`client` gives you access to four functions for working with WebAuthn: + +- `client.isSupported()`: returns a Promise which resolves to a boolean—whether or not WebAuthn is supported in the current browser browser +- `client.isEnabled()`: returns a boolean for whether the user currently has a `webAuthn` cookie, which means this device has been registered already and can be used for login +- `client.register()`: returns a Promise which gets options from the server, presents the prompt to scan your fingerprint/face, and then sends the result up to the server. It will either resolve successfully with an object `{ verified: true }` or throw an error. This function is used when the user has not registered this device yet (`client.isEnabled()` returns `false`). +- `client.authenticate()`: returns a Promise which gets options from the server, presents the prompt to scan the user's fingerprint/face, and then sends the result up to the server. It will either resolve successfully with an object `{ verified: true }` or throw an error. This should be used when the user has already registered this device (`client.isEnabled()` returns `true`) diff --git a/docs/versioned_docs/version-7.0/auth/firebase.md b/docs/versioned_docs/version-7.0/auth/firebase.md new file mode 100644 index 000000000000..dc4b3a5d95aa --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/firebase.md @@ -0,0 +1,73 @@ +--- +sidebar_label: Firebase +--- + +# Firebase Authentication + +To get started, run the setup command: + +```bash +yarn rw setup auth firebase +``` + +This installs all the packages, writes all the files, and makes all the code modifications you need. 
+For a detailed explanation of all the api- and web-side changes that aren't exclusive to Firebase, see the top-level [Authentication](../authentication.md) doc. +For now, let's focus on Firebase's side of things. + +If you don't have a Firebase account yet, now's the time to make one: navigate to https://firebase.google.com and click "Go to console", sign up, and create a project. +After it's ready, we'll get the API keys. + +To get the API keys, we need to add a web app to our project. +Click the `</>` icon in the main call to action on the dashboard—"Get started by adding Firebase to your app". +Give your app a nickname, then you should see the API keys. +Since we're only using Firebase for auth, we only need `apiKey`, `authDomain`, and `projectId`. +Copy them into your project's `.env` file: + +```bash title=".env" +FIREBASE_API_KEY="..." +FIREBASE_AUTH_DOMAIN="..." +FIREBASE_PROJECT_ID="..." +``` + +Lastly, include `FIREBASE_API_KEY` and `FIREBASE_AUTH_DOMAIN` in the list of env vars that should be available to the web side (`FIREBASE_PROJECT_ID` is for the api side): + +```toml title="redwood.toml" +[web] + # ... + includeEnvironmentVariables = ["FIREBASE_API_KEY", "FIREBASE_AUTH_DOMAIN"] +``` + +We've hooked up our Firebase app to our Redwood app, but if you try it now, it won't work. +That's because we haven't actually enabled auth in our Firebase app yet. + +Back to the dashboard one more time: in the nav on the left, click "Build", "Authentication", and "Get started". +We're going to go with "Email/Password" here, but feel free to configure things as you wish. +Click "Email/Password", enable it, and click "Save". + +That should be enough; now, things should just work. +Let's make sure: if this is a brand new project, generate a home page. +There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: + +```tsx title="web/src/pages/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc. */} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={() => signUp({ + // email: 'your.email@email.com', + // password: 'super secret password', + })}>sign up</button> + </> + ) +} + +export default HomePage +``` + +"Email/Password" says what it means: Firebase doesn't redirect to a hosted sign-up page or open a sign-up modal. +In a real app, you'd build a form here, but we're going to hardcode an email and password. +After you sign up, you should see `{"isAuthenticated":true}` on the page. diff --git a/docs/versioned_docs/version-7.0/auth/netlify.md b/docs/versioned_docs/version-7.0/auth/netlify.md new file mode 100644 index 000000000000..67a8efd5e1e6 --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/netlify.md @@ -0,0 +1,64 @@ +--- +sidebar_label: Netlify +--- + +# Netlify Identity Authentication + +To get started, run the setup command: + +```bash +yarn rw setup auth netlify +``` + +This installs all the packages, writes all the files, and makes all the code modifications you need. +For a detailed explanation of all the api- and web-side changes that aren't exclusive to Netlify Identity, see the top-level [Authentication](../authentication.md) doc. +For now, let's focus on Netlify's side of things. + +There's a catch with Netlify Identity: your app has to be deployed to Netlify to use it.
+If that's a deal breaker for you, there are [other great auth providers to choose from](../authentication.md#official-integrations). +But here we'll assume it's not and that your app is already deployed. +(If it isn't, do that first, then come back. And yes, there's a setup command for that: `yarn rw setup deploy netlify`.) + +Once you've deployed your app, go to its overview, click "Integrations" in the nav at the top, search for Netlify Identity, enable it, and copy the API endpoint in the Identity card. +(It should look something like `https://my-redwood-app.netlify.app/.netlify/identity`.) + +Let's do one more thing while we're here to make signing up later a little easier. +Right now, if we sign up, we'll have to verify our email address. +Let's forgo that feature for the purposes of this doc: click "Settings and usage", then scroll down to "Emails" and look for "Confirmation template". +Click "Edit settings", tick the box next to "Allow users to sign up without verifying their email address", and click "Save". + +Netlify Identity works a little differently than the other auth providers in that you don't have to copy API keys to your project's `.env` and `redwood.toml` files. +Instead, the first time you use it (by, say, calling `signUp` from `useAuth`), it'll ask you for your app's API endpoint. +So let's go ahead and use it: if this is a brand new project, generate a home page. +There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: + +``` +yarn rw g page home / +``` + +```tsx title="web/src/pages/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc. */} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={signUp}>sign up</button> + </> + ) +} + +export default HomePage +``` + +Clicking sign up should open a modal; paste the API endpoint you copied earlier there: + +<img width="1522" alt="image" src="https://user-images.githubusercontent.com/32992335/209391973-239d5a12-649f-4e33-9098-cd297034f563.png" /> + +After that, you should see a sign-up modal. Go ahead and sign up: + +<img width="1522" alt="image" src="https://user-images.githubusercontent.com/32992335/209392156-e87a04b8-9ce8-4bc6-bc6b-92a2de8effe3.png" /> + +After you sign up, you should see `{"isAuthenticated":true}` on the page. diff --git a/docs/versioned_docs/version-7.0/auth/supabase.md b/docs/versioned_docs/version-7.0/auth/supabase.md new file mode 100644 index 000000000000..5b4e19903f77 --- /dev/null +++ b/docs/versioned_docs/version-7.0/auth/supabase.md @@ -0,0 +1,334 @@ +--- +sidebar_label: Supabase +--- +# Supabase Authentication + +To get started, run the setup command: + +```bash +yarn rw setup auth supabase +``` + +This installs all the packages, writes all the files, and makes all the code modifications you need. +For a detailed explanation of all the api- and web-side changes that aren't exclusive to Supabase, see the top-level [Authentication](../authentication.md) doc. For now, let's focus on Supabase's side of things. + +## Setup + +If you don't have a Supabase account yet, now's the time to make one: navigate to https://supabase.com and click "Start your project" in the top right. Then sign up and create an organization and a project. + +While Supabase creates your project, it thoughtfully shows your project's API keys.
+(If the page refreshes while you're copying them over, just scroll down a bit and look for "Connecting to your new project".) +We're looking for "Project URL" and "API key" (the `anon`, `public` one). +Copy them into your project's `.env` file as `SUPABASE_URL` and `SUPABASE_KEY` respectively. + +There's one more we need, the "JWT Secret", that's not here. +To get that one, click the cog icon ("Project Settings") near the bottom of the nav on the left. +Then click "API", scroll down a bit, and you should see it—"JWT Secret" under "JWT Settings". +Copy it into your project's `.env` file as `SUPABASE_JWT_SECRET`. +All together now: + +```bash title=".env" +SUPABASE_URL="..." +SUPABASE_KEY="..." +SUPABASE_JWT_SECRET="..." +``` + +Lastly, in `redwood.toml`, include `SUPABASE_URL` and `SUPABASE_KEY` in the list of env vars that should be available to the web side: + +```toml title="redwood.toml" +[web] + # ... + includeEnvironmentVariables = ["SUPABASE_URL", "SUPABASE_KEY"] +``` + +## Authentication UI + +Supabase doesn't redirect to a hosted sign-up page or open a sign-up modal. +In a real app, you'd build a form here, but we're going to hardcode an email and password. + +### Basic Example + +After you sign up, head to your inbox: there should be a confirmation email from Supabase waiting for you. + +Click the link, then head back to your app. +Once you refresh the page, you should see `{"isAuthenticated":true}` on the page. + + +Let's make sure: if this is a brand new project, generate a home page. + +There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: + +```tsx title="web/src/pages/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc. */} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={() => signUp({ + email: 'your.email@email.com', + password: 'super secret password', + })}>sign up</button> + </> + ) +} +``` + +## Authentication Reference + +You will notice that [Supabase Javascript SDK Auth API](https://supabase.com/docs/reference/javascript/auth-api) reference documentation presents methods to sign in with the various integrations Supabase supports: password, OAuth, IDToken, SSO, etc. + +The RedwoodJS implementation of Supabase authentication supports these as well, but within the `logIn` method of the `useAuth` hook. + +That means that you will see that Supabase documents sign in with email password as: + +```ts +const { data, error } = await supabase.auth.signInWithPassword({ + email: 'example@email.com', + password: 'example-password', +}) +``` + +In RedwoodJS, you will always use `logIn` and pass the necessary credential options and also an `authMethod` to declare how you want to authenticate. + +```ts +const { logIn } = useAuth() + +await logIn({ + authMethod: 'password', + email: 'example@email.com', + password: 'example-password', +}) +``` + +### Sign Up with email and password + +Creates a new user. + +```ts +const { signUp } = useAuth() + +await signUp({ + email: 'example@email.com', + password: 'example-password', +}) +``` + +### Sign Up with email and password and additional user metadata + +Creates a new user with additional user metadata. 
+ +```ts +const { signUp } = useAuth() + +await signUp({ + email: 'example@email.com', + password: 'example-password', + options: { + data: { + first_name: 'John', + age: 27, + } + } +}) +``` + +### Sign Up with email and password and a redirect URL + +Creates a new user with a redirect URL. + +```ts +const { signUp } = useAuth() + +await signUp({ + email: 'example@email.com', + password: 'example-password', + options: { + emailRedirectTo: 'https://example.com/welcome' + } +}) +``` + +### Sign in a user with email and password + +Log in an existing user with an email and password or phone and password. + +* Requires either an email and password or a phone number and password. + +```ts +const { logIn } = useAuth() + +await logIn({ + authMethod: 'password', + email: 'example@email.com', + password: 'example-password', +}) +``` + +### Sign in a user through Passwordless/OTP + +Log in a user using magiclink or a one-time password (OTP). + +* Requires either an email or phone number. + +* This method is used for passwordless sign-ins where an OTP is sent to the user's email or phone number. + +```ts +const { logIn } = useAuth() + +await logIn({ + authMethod: 'otp', + email: 'example@email.com', + options: { + emailRedirectTo: 'https://example.com/welcome' + } +}) +``` + +### Sign in a user through OAuth + +Log in an existing user via a third-party provider. + +* This method is used for signing in using a third-party provider. + +* Supabase supports many different [third-party providers](https://supabase.com/docs/guides/auth#providers). + +```ts +const { logIn } = useAuth() + +await logIn({ + authMethod: 'oauth', + provider: 'github', +}) +``` + +### Sign in a user with IDToken + +Log in a user using IDToken. + +```ts +const { logIn } = useAuth() + +await logIn({ + authMethod: 'id_token', + provider: 'apple', + token: 'cortland-apple-id-token', +}) +``` + +### Sign in a user with SSO + +Log in a user using single sign-on (SSO). + +```ts +const { logIn } = useAuth() + +await logIn({ + authMethod: 'sso', + providerId: 'sso-provider-identity-uuid', + domain: 'example.com', +}) +``` + +### Get Current User + +Gets the content of the current user set by API side authentication. + +```ts +const { currentUser } = useAuth() + +<p>{JSON.stringify({ currentUser })}</p> +``` + +### Get Current User Metadata + +Gets the content of the current Supabase user session, i.e., `auth.getSession()`. + +```ts +const { userMetadata } = useAuth() + +<p>{JSON.stringify({ userMetadata })}</p> +``` + +### Sign out a user + +Inside a browser context, signOut() will remove the logged-in user from the browser session and log them out, removing all items from localStorage and then triggering a "SIGNED_OUT" event. + +In order to use the signOut() method, the user needs to be signed in first. + +```ts +const { logOut } = useAuth() + +logOut() +``` + +### Verify and log in through OTP + +Log in a user given a user-supplied OTP received via mobile. + +* The verifyOtp method takes in different verification types. If a phone number is used, the type can either be sms or phone_change. If an email address is used, the type can be one of the following: signup, magiclink, recovery, invite or email_change. + +* The verification type used should be determined based on the corresponding auth method called before verifyOtp to sign up or sign in a user. + + +The RedwoodJS auth provider doesn't expose the `verifyOtp` method from the Supabase SDK directly. + +Instead, since you always have access to the Supabase Auth client, you can access any method it exposes.
+
+So, in order to use the `verifyOtp` method, you would:
+
+```ts
+const { client } = useAuth()
+
+useEffect(() => {
+  const verify = async () => {
+    const { data, error } = await client.verifyOtp({ phone, token, type: 'sms' })
+  }
+
+  verify()
+}, [client])
+```
+
+### Access the Supabase Auth Client
+
+Sometimes you may need to access the Supabase Auth client directly.
+
+```ts
+const { client } = useAuth()
+```
+
+You can then use it to work with Supabase sessions, or auth events.
+
+When using the client in a React component, you'll have to wrap any method that needs an `await` in an async function inside a `useEffect()`.
+
+### Retrieve a session
+
+Returns the session, refreshing it if necessary. The session returned can be `null` if no session is detected, which can happen when a user is not signed in or has logged out.
+
+```ts
+const { client } = useAuth()
+
+useEffect(() => {
+  const getSession = async () => {
+    const { data, error } = await client.getSession()
+  }
+
+  getSession()
+}, [client])
+```
+
+### Listen to auth events
+
+Receive a notification every time an auth event happens.
+
+* Types of auth events: `SIGNED_IN`, `SIGNED_OUT`, `TOKEN_REFRESHED`, `USER_UPDATED`, `PASSWORD_RECOVERY`
+
+```ts
+const { client } = useAuth()
+
+useEffect(() => {
+  const { data: { subscription } } = client.onAuthStateChange((event, session) => {
+    console.log(event, session)
+  })
+
+  return () => {
+    subscription.unsubscribe()
+  }
+}, [client])
+```
diff --git a/docs/versioned_docs/version-7.0/auth/supertokens.md b/docs/versioned_docs/version-7.0/auth/supertokens.md
new file mode 100644
index 000000000000..8b0b6b97ca8a
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/auth/supertokens.md
@@ -0,0 +1,119 @@
+---
+sidebar_label: SuperTokens
+---
+
+# SuperTokens Authentication
+
+To get started, run the setup command:
+
+```bash
+yarn rw setup auth supertokens
+```
+
+This installs all the packages, writes all the files, and makes all the code modifications you need.
+
+:::info
+
+You may have noticed that in `api/src/functions/auth.ts` there's an import from `'supertokens-node/framework/awsLambda'`. This is fine, even if your app isn't running in a serverless environment like AWS Lambda. In "serverful" environments, Redwood automatically handles the translation between Fastify's request and reply objects and functions' AWS Lambda signature.
+
+:::
+
+For a detailed explanation of all the api- and web-side changes that aren't exclusive to SuperTokens, see the top-level [Authentication](../authentication.md) doc.
+For now, let's focus on SuperTokens's side of things.
+
+When you run the setup command, it configures your app to support both email+password logins as well as social auth logins (Apple, GitHub and Google). Working with those social auth logins does require quite a few environment variables. And SuperTokens itself needs a couple of variables too. Thankfully, SuperTokens makes this easy to set up by providing values we can use for testing.
+
+# Environment variables
+
+The environment variables have to be added either to your project's `.env` file (when running in a development environment), or to the environment variables of your hosting provider (when running in production).
+
+## Base setup
+
+```bash
+SUPERTOKENS_APP_NAME="Redwoodjs App" # this will be used in the email template for password reset or email verification emails.
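+# The JWKS URL below should point at the JWKS endpoint exposed by your app's auth function;
+# the api side uses it to verify SuperTokens session JWTs (see the production setup below for the deployed URL).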
+SUPERTOKENS_JWKS_URL=http://localhost:8910/.redwood/functions/auth/jwt/jwks.json +SUPERTOKENS_CONNECTION_URI=https://try.supertokens.io # set to the correct connection uri +``` + +## Production setup + +Assuming that your web side is hosted on `https://myapp.com`: + +```bash +SUPERTOKENS_WEBSITE_DOMAIN=https://myapp.com +SUPERTOKENS_JWKS_URL=https://myapp.com/.redwood/functions/auth/jwt/jwks.json +``` + +## Managed Supertokens service setup + +```bash +SUPERTOKENS_API_KEY=your-api-key # The value can be omitted when self-hosting Supertokens +``` + +## Social login setup +The following environment variables have to be set up (depending on the social login options): + +```bash +SUPERTOKENS_APPLE_CLIENT_ID=4398792-io.supertokens.example.service +SUPERTOKENS_APPLE_SECRET_KEY_ID=7M48Y4RYDL +SUPERTOKENS_APPLE_SECRET_PRIVATE_KEY=-----BEGIN PRIVATE KEY-----\nMIGTAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBHkwdwIBAQQgu8gXs+XYkqXD6Ala9Sf/iJXzhbwcoG5dMh1OonpdJUmgCgYIKoZIzj0DAQehRANCAASfrvlFbFCYqn3I2zeknYXLwtH30JuOKestDbSfZYxZNMqhF/OzdZFTV0zc5u5s3eN+oCWbnvl0hM+9IW0UlkdA\n-----END PRIVATE KEY----- +SUPERTOKENS_APPLE_SECRET_TEAM_ID=YWQCXGJRJL +SUPERTOKENS_GITHUB_CLIENT_ID=467101b197249757c71f +SUPERTOKENS_GITHUB_CLIENT_SECRET=e97051221f4b6426e8fe8d51486396703012f5bd +SUPERTOKENS_GOOGLE_CLIENT_ID=1060725074195-kmeum4crr01uirfl2op9kd5acmi9jutn.apps.googleusercontent.com +SUPERTOKENS_GOOGLE_CLIENT_SECRET=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 +``` + +## `redwood.toml` setup + +Make sure to modify `redwood.toml` to pass the required environment variables to the web side: + +```toml +[web] +... +includeEnvironmentVariables = [ + 'SUPERTOKENS_WEBSITE_DOMAIN', + 'SUPERTOKENS_API_DOMAIN', + 'SUPERTOKENS_API_GATEWAY_PATH', + 'SUPERTOKENS_APP_NAME' +] +``` + + +# Page setup + +Let's make sure: if this is a brand new project, generate a home page. +There we'll try to sign up by destructuring `signUp` from the `useAuth` hook (import that from `'src/auth'`). We'll also destructure and display `isAuthenticated` to see if it worked: + +``` +yarn rw g page home / +``` + +```tsx title="web/src/pages/HomePage.tsx" +import { useAuth } from 'src/auth' + +const HomePage = () => { + const { isAuthenticated, signUp } = useAuth() + + return ( + <> + {/* MetaTags, h1, paragraphs, etc. */} + + <p>{JSON.stringify({ isAuthenticated })}</p> + <button onClick={signUp}>sign up</button> + </> + ) +} + +export default HomePage +``` + +Clicking sign up should navigate you to `/auth` where SuperToken's default login/sign up UI is rendered. + +<img width="463" height="696" alt="SuperTokens default UI" src="https://user-images.githubusercontent.com/30793/215893664-d367eb3d-566e-4541-a01a-5772d95cc9c7.png" /> + +After you sign up, you should be redirected back to your Redwood app, and you should see `{"isAuthenticated":true}` on the page. + +## Troubleshooting + +If going to `http://localhost:8910/auth` results in the plain Javascript file being served instead of the expected auth page, rename the `web/src/auth.tsx` file to `web/src/authentication.tsx`, and update the imports (related to https://github.com/redwoodjs/redwood/issues/9740). diff --git a/docs/versioned_docs/version-7.0/authentication.md b/docs/versioned_docs/version-7.0/authentication.md new file mode 100644 index 000000000000..88506057eb2f --- /dev/null +++ b/docs/versioned_docs/version-7.0/authentication.md @@ -0,0 +1,203 @@ +--- +description: Set up an authentication provider +--- + +# Authentication + +Redwood has integrated auth end to end, from the web side to the api side. 
+On the web side, the router can protect pages via the `PrivateSet` component, and even restrict access at the role level.
+And if you'd prefer to work with the primitives, the `useAuth` hook exposes all the pieces to build the experience you want.
+
+Likewise, the api side is locked down by default: all SDLs are generated with the `@requireAuth` directive, ensuring that making things publicly available is something that you opt in to rather than out of.
+You can also require auth anywhere in your Services, and even in your serverful or serverless functions.
+
+Last but not least, Redwood provides its own self-hosted, full-featured auth provider: [dbAuth](./auth/dbauth.md).
+
+In this doc, we'll cover auth at a high level.
+All auth providers share the same interface, so the information here will be useful no matter which auth provider you use.
+
+## Official integrations
+
+Redwood has a simple API to integrate any auth provider you can think of. But to make it easier for you to get started, Redwood provides official integrations for some of the most popular auth providers out of the box:
+
+- [Auth0](./auth/auth0.md)
+- [Azure Active Directory](./auth/azure.md)
+- [Clerk](./auth/clerk.md)
+- [Firebase](./auth/firebase.md)
+- [Netlify](./auth/netlify.md)
+- [Supabase](./auth/supabase.md)
+- [SuperTokens](./auth/supertokens.md)
+
+:::tip how to tell if an integration is official
+
+To tell if an integration is official, look for the `@redwoodjs` scope.
+For example, Redwood's Auth0 integration comprises two npm packages: `@redwoodjs/auth-auth0-web` and `@redwoodjs/auth-auth0-api`.
+
+:::
+
+Other than bearing the `@redwoodjs` scope, the reason these providers are official is that we're committed to keeping them up to date.
+You can set up any of them via the corresponding auth setup command:
+
+```
+yarn rw setup auth auth0
+```
+
+## The API at a high-level
+
+We mentioned that Redwood has a simple API you can use to integrate any provider you want.
+Whether you roll your own auth provider or choose one of Redwood's integrations, it's good to be familiar with it, so let's dive into it here.
+
+On the web side, there are two components that can be auth-enabled: the `RedwoodApolloProvider` in `web/src/App.tsx` and the `Router` in `web/src/Routes.tsx`.
+Both take a `useAuth` prop. If provided, they'll use this hook to get information about the app's auth state. The `RedwoodApolloProvider` uses it to get a token to include in every GraphQL request, and the `Router` uses it to determine if a user has access to private or role-restricted routes.
+
+When you set up an auth provider, the setup command makes a new file, `web/src/auth.ts`. This file's job is to create the `AuthProvider` component and the `useAuth` hook by integrating with the auth provider of your choice. Whenever you need access to the auth context, you'll import the `useAuth` hook from this file. The `RedwoodApolloProvider` and the `Router` are no exceptions:
+
+![web-side-auth](https://user-images.githubusercontent.com/32992335/208549951-469617d7-c798-4d9a-8a29-46efe23cca6a.png)
+
+Once auth is set up on the web side, every GraphQL request includes a JWT (JSON Web Token).
+The api side needs a way of verifying and decoding this token if it's to do anything with it.
+There are two steps to this process: + +- decoding the token +- mapping it into a user object + +The `createGraphQLHandler` function in `api/src/functions/graphql.ts` takes two props, `authDecoder` and `getCurrentUser`, for each of these steps (respectively): + +```ts title="api/src/functions/graphql.ts" +// highlight-next-line +import { authDecoder } from '@redwoodjs/auth-auth0-api' +import { createGraphQLHandler } from '@redwoodjs/graphql-server' + +import directives from 'src/directives/**/*.{js,ts}' +import sdls from 'src/graphql/**/*.sdl.{js,ts}' +import services from 'src/services/**/*.{js,ts}' + +// highlight-next-line +import { getCurrentUser } from 'src/lib/auth' +import { db } from 'src/lib/db' +import { logger } from 'src/lib/logger' + +export const handler = createGraphQLHandler({ + // highlight-start + authDecoder, + getCurrentUser, + // highlight-end + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +### Destructuring the `useAuth` hook + +That was auth at a high level. +Now for a few more details on something you'll probably use a lot, the `useAuth` hook. + +The `useAuth` hook provides a streamlined interface to your auth provider's client SDK. +Much of what the functions it returns do is self explanatory, but the options they take depend on the auth provider: + +| Name | Description | +| :---------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `client` | The client instance used in creating the auth provider. Most of the functions here use this under the hood | +| `currentUser` | An object containing information about the current user as set on the `api` side, or if the user isn't authenticated, `null` | +| `getToken` | Returns a JWT | +| `hasRole` | Determines if the current user is assigned a role like `"admin"` or assigned to any of the roles in an array | +| `isAuthenticated` | A boolean indicating whether or not the user is authenticated | +| `loading` | If the auth context is loading | +| `logIn` | Logs a user in | +| `logOut` | Logs a user out | +| `reauthenticate` | Refetch auth data and context. (This one is called internally and shouldn't be something you have to reach for often) | +| `signUp` | Signs a user up | +| `userMetadata` | An object containing the user's metadata (or profile information), fetched directly from an instance of the auth provider client. Or if the user isn't authenticated, `null` | + +### Protecting routes + +You can require that a user be authenticated to navigate to a route by wrapping it in the `PrivateSet` component. 
+An unauthenticated user will be redirected to the route specified in either component's `unauthenticated` prop: + +```tsx title="web/src/Routes.tsx" +import { Router, Route, PrivateSet } from '@redwoodjs/router' + +const Routes = () => { + return ( + <Router> + <Route path="/" page={HomePage} name="home" /> + <Route path="/login" page={LoginPage} name="login" /> + + // highlight-next-line + <PrivateSet unauthenticated="login"> + <Route path="/admin" page={AdminPage} name="admin" /> + <Route path="/secret-page" page={SecretPage} name="secret" /> + </PrivateSet> + </Router> + ) +} +``` + +You can also restrict access by role by passing a role or an array of roles to the `PrivateSet` component's `hasRole` prop: + +```tsx title="web/src/Routes.tsx" +import { Router, Route, PrivateSet } from '@redwoodjs/router' + +const Routes = () => { + return ( + <Router> + <Route path="/" page={HomePage} name="home" /> + <Route path="/login" page={LoginPage} name="login" /> + <Route path="/forbidden" page={ForbiddenPage} name="forbidden" /> + + <PrivateSet unauthenticated="login"> + <Route path="/secret-page" page={SecretPage} name="secret" /> + </PrivateSet> + + // highlight-next-line + <PrivateSet unauthenticated="forbidden" hasRole="admin"> + <Route path="/admin" page={AdminPage} name="admin" /> + </PrivateSet> + + // highlight-next-line + <PrivateSet unauthenticated="forbidden" hasRole={['author', 'editor']}> + <Route path="/posts" page={PostsPage} name="posts" /> + </PrivateSet> + </Router> + ) +} +``` + +### api-side currentUser + +We briefly mentioned that GraphQL requests include an `Authorization` header in every request when a user is authenticated. +The api side verifies and decodes the token in this header via the `authDecoder` function. +While information about the user is technically available at this point, it's still pretty raw. +You can map it into a real user object via the `getCurrentUser` function. +Both these functions are passed to the `createGraphQLHandler` function in `api/src/functions/graphql.ts`: + +```ts title="api/src/functions/graphql.ts" +export const handler = createGraphQLHandler({ + authDecoder, + getCurrentUser, + // ... +}) + +``` + +If you're using one of Redwood's official integrations, `authDecoder` comes from the corresponding integration package (in auth0's case, `@redwoodjs/auth-auth0-api`): + +```ts +import { authDecoder } from '@redwoodjs/auth-auth0-api' +``` + +If you're rolling your own, you'll have to write it yourself. See the [Custom Auth](./auth/custom.md#api-side) docs for an example. + +It's always up to you to write `getCurrentUser`, though the setup command will stub it out for you in `api/src/lib/auth.ts` with plenty of guidance. + +`getCurrentUser`'s return is made globally available in the api side's context via `context.currentUser` for convenience. + +### Locking down the GraphQL api + +Use the `requireAuth` and `skipAuth` [GraphQL directives](directives#secure-by-default-with-built-in-directives) to protect individual GraphQL calls. diff --git a/docs/versioned_docs/version-7.0/builds.md b/docs/versioned_docs/version-7.0/builds.md new file mode 100644 index 000000000000..6af668900a27 --- /dev/null +++ b/docs/versioned_docs/version-7.0/builds.md @@ -0,0 +1,37 @@ +--- +description: What happens when you build your app +--- +# Builds + +> ⚠ **Work in Progress** ⚠️ +> +> There's more to document here. In the meantime, you can check our [community forum](https://community.redwoodjs.com/search?q=yarn%20rw%20build) for answers. +> +> Want to contribute? 
Redwood welcomes contributions and loves helping people become contributors. +> You can edit this doc [here](https://github.com/redwoodjs/redwoodjs.com/blob/main/docs/builds.md). +> If you have any questions, just ask for help! We're active on the [forums](https://community.redwoodjs.com/c/contributing/9) and on [discord](https://discord.com/channels/679514959968993311/747258086569541703). + + +## API + +The api side of Redwood is transpiled by Babel into the `./api/dist` folder. + +### Steps on Netlify + +To emulate Netlify's build steps locally: + +```bash +yarn rw build api +cd api +yarn zip-it-and-ship-it dist/functions/ zipballs/ +``` + +Each lambda function in `./api/dist/functions` is parsed by zip-it-and-ship-it resulting in a zip file per lambda function that contains all the dependencies required for that lambda function. + +>Note: The `@netlify/zip-it-and-ship-it` package needs to be installed as a dev dependency in `api/`. Use the command `yarn workspace api add -D @netlify/zip-it-and-ship-it`. +>- You can learn more about the package [here](https://www.npmjs.com/package/@netlify/zip-it-and-ship-it). +>- For more information on AWS Serverless Deploy see these [docs](/docs/deploy/serverless). + +## Web + +The web side of Redwood is packaged by Webpack into the `./web/dist` folder. diff --git a/docs/versioned_docs/version-7.0/cells.md b/docs/versioned_docs/version-7.0/cells.md new file mode 100644 index 000000000000..0a7377cd6b54 --- /dev/null +++ b/docs/versioned_docs/version-7.0/cells.md @@ -0,0 +1,412 @@ +--- +description: Declarative data fetching with Cells +--- +# Cells + +Cells are a declarative approach to data fetching and one of Redwood's signature modes of abstraction. +By providing conventions around data fetching, Redwood can get in between the request and the response to do things like query optimization and more, all without you ever having to change your code. + +While it might seem like there's a lot of magic involved, all a Cell really does is execute a GraphQL query and manage its lifecycle. +The idea is that, by exporting named constants that declare what you want your UI to look like throughout a query's lifecycle, +Redwood can assemble these into a component template at build-time using a Babel plugin. +All without you having to write a single line of imperative code! + +## Generating a Cell + +You can generate a Cell with Redwood's Cell generator: + +```bash +yarn rw generate cell <name> +``` + +This creates a directory named `<name>Cell` in `web/src/components` with four files: + +| File | Description | +| :---------------------- | :------------------------------------------------------ | +| `<name>Cell.js` | The actual Cell | +| `<name>Cell.test.js` | Jest tests for each state of the Cell | +| `<name>Cell.stories.js` | Storybook stories for each state of the Cell | +| `<name>Cell.mock.js` | Mock data for both the Jest tests and Storybook stories | + +### Single Item Cell vs List Cell + +Sometimes you want a Cell that renders a single item and other times you want a Cell that renders a list. +Redwood's Cell generator can do both. + +First, it detects if `<name>` is singular or plural. +For example, to generate a Cell that renders a list of users, run `yarn rw generate cell users`. 
+Second, for irregular words whose singular and plural are the same, such as "equipment" or "pokemon", you can pass `--list` to tell Redwood to generate a list Cell explicitly:
+
+```bash
+yarn rw generate cell equipment --list
+```
+
+## Cells in-depth
+
+A Cell exports five constants: `QUERY`, `Loading`, `Empty`, `Failure`, and `Success`. The root query in `QUERY` is the same as `<name>` so that, if you're generating a cell based on a model in your `schema.prisma`, you can get something out of the database right away. But there's a good chance you won't generate your Cell this way, so if you need to, make sure to change the root query. See the [Cells](tutorial/chapter2/cells.md#our-first-cell) section of the Tutorial for a great example of this.
+
+## Usage
+
+With Cells, you have a total of eight exports to work with:
+
+| Name          | Type              | Description                                                   |
+| :------------ | :---------------- | :------------------------------------------------------------ |
+| `QUERY`       | `string,function` | The query to execute                                          |
+| `beforeQuery` | `function`        | Lifecycle hook; prepares variables and options for the query  |
+| `isEmpty`     | `function`        | Lifecycle hook; decides if the Cell should render Empty       |
+| `afterQuery`  | `function`        | Lifecycle hook; sanitizes data returned from the query        |
+| `Loading`     | `component`       | If the request is in flight, render this component            |
+| `Empty`       | `component`       | If there's no data (`null` or `[]`), render this component    |
+| `Failure`     | `component`       | If something went wrong, render this component                |
+| `Success`     | `component`       | If the data has loaded, render this component                 |
+
+Only `QUERY` and `Success` are required. If you don't export `Empty`, empty results are sent to `Success`, and if you don't export `Failure`, errors are output to the console.
+
+In addition to displaying the right component at the right time, Cells also funnel the right props to the right component. `Loading`, `Empty`, `Failure`, and `Success` all have access to the props passed down from the Cell in good ol' React fashion, and most of the `useQuery` hook's return as a prop called `queryResult`. In addition to all those props, `Empty` and `Success` also get the `data` returned from the query and an `updating` prop indicating whether the Cell is currently fetching new data. `Failure` also gets `updating` and exclusive access to `error` and `errorCode`.
+
+We mentioned above that Cells receive "most" of what's returned from the `useQuery` hook. You can see exactly what `useQuery` returns in Apollo Client's [API reference](https://www.apollographql.com/docs/react/api/react/hooks/#result). Again note that `error` and `data` get some special treatment.
+
+### QUERY
+
+`QUERY` can be a string or a function. If `QUERY` is a function, it has to return a valid GraphQL document.
+
+It's more than OK to have more than one root query. Here's an example:
+
+```jsx {7-10}
+export const QUERY = gql`
+  query {
+    posts {
+      id
+      title
+    }
+    authors {
+      id
+      name
+    }
+  }
+`
+```
+
+So in this case, both `posts` and `authors` would be available to `Success`:
+
+```jsx
+export const Success = ({ posts, authors }) => {
+  // ...
+}
+```
+
+Normally queries have variables. Cells are set up to use any props they receive from their parent as variables (things are set up this way in `beforeQuery`).
For example, here `BlogPostCell` takes a prop, `numberToShow`, so `numberToShow` is just available to your `QUERY`: + +```jsx {7} +import BlogPostsCell from 'src/components/BlogPostsCell' + +const HomePage = () => { + return ( + <div> + <h1>Home</h1> + <BlogPostsCell numberToShow={3} /> + </div> + ) +} + +export default HomePage +``` + +```jsx {2-3} +export const QUERY = gql` + query($numberToShow: Int!) { + posts(numberToShow: $numberToShow) { + id + title + } + } +` +``` + +This means you can think backwards about your Cell's props from your SDL: whatever the variables in your SDL are, that's what your Cell's props should be. + +### beforeQuery + +`beforeQuery` is a lifecycle hook. The best way to think about it is as a chance to configure [Apollo Client's `useQuery` hook](https://www.apollographql.com/docs/react/api/react/hooks#options). + +By default, `beforeQuery` gives any props passed from the parent component to `QUERY` so that they're available as variables for it. It'll also set the fetch policy to `'cache-and-network'` since we felt it matched the behavior users want most of the time: + +```jsx +export const beforeQuery = (props) => { + return { + variables: props, + fetchPolicy: 'cache-and-network' + } +} +``` + +For example, if you wanted to turn on Apollo's polling option, and prevent caching, you could export something like this (see Apollo's docs on [polling](https://www.apollographql.com/docs/react/data/queries/#polling) and [caching](https://www.apollographql.com/docs/react/data/queries/#setting-a-fetch-policy)) + +```jsx +export const beforeQuery = (props) => { + return { variables: props, fetchPolicy: 'no-cache', pollInterval: 2500 } +} +``` + +You can also use `beforeQuery` to populate variables with data not included in the Cell's props (like from React's Context API or a global state management library). If you provide a `beforeQuery` function, the Cell will automatically change the type of its props to match the first argument of the function. + +```jsx +// The Cell will take no props: <Cell /> +export const beforeQuery = () => { + const { currentUser } = useAuth() + + return { + variables: { userId: currentUser.id } + } +} +``` + +```jsx +// The cell will take 1 prop named "word" that is a string: <Cell word="abc"> +export const beforeQuery = ({ word }: { word: string }) => { + return { + variables: { magicWord: word } + } +} +``` + +### isEmpty + +`isEmpty` is an optional lifecycle hook. It returns a boolean to indicate if the Cell should render empty. Use it to override the default check, which checks if the Cell's root fields are null or empty arrays. + +It receives two parameters: 1) the `data`, and 2) an object that has the default `isEmpty` function, named `isDataEmpty`, so that you can extend the default: + +```jsx +export const isEmpty = (data, { isDataEmpty }) => { + return isDataEmpty(data) || data?.blog?.status === 'hidden' +} +``` + +### afterQuery + +`afterQuery` is a lifecycle hook. It runs just before data gets to `Success`. +Use it to sanitize data returned from `QUERY` before it gets there. + +By default, `afterQuery` just returns the data as it is: + +### Loading + +If there's no cached data and the request is in flight, a Cell renders `Loading`. + +When you're developing locally, you can catch your Cell waiting to hear back for a moment if set your speed in the Inspector's **Network** tab to something like "Slow 3G". + +But why bother with Slow 3G when Redwood comes with Storybook? 
Storybook makes developing components like `Loading` (and `Failure`) a breeze. We don't have to put up with hacky workarounds like Slow 3G or intentionally breaking our app just to develop our components. + +### Empty + +A Cell renders this component if there's no data. +By no data, we mean if the response is 1) `null` or 2) an empty array (`[]`). + +### Failure + +A Cell renders this component if something went wrong. You can quickly see this in action if you add an untyped field to your `QUERY`: + +```jsx {6} +const QUERY = gql` + query { + posts { + id + title + unTypedField + } + } +` +``` + +But, like `Loading`, Storybook is probably a better place to develop this. + +<!-- In development, we have it so that errors blanket the page. +In production, failed cells won't break your app, they'll just be empty divs... --> + +In this example, we use the `errorCode` to conditionally render the error heading title, and we also use it for our translation string. +```jsx +export const Failure = ({ error, errorCode }: CellFailureProps) => { + const { t } = useTranslation() + return ( + <div style={{ color: 'red' }}> + {errorCode === 'NO_CONFIG' ? <h1>NO_CONFIG</h1> : <h1>ERROR</h1>} + Error: {error.message} - Code: {errorCode} - {t(`error.${errorCode}`)} + </div> + ) +} +``` + +### Success + +If everything went well, a Cell renders `Success`. + +As mentioned, Success gets exclusive access to the `data` prop. But if you try to destructure it from `props`, you'll notice that it doesn't exist. This is because Redwood adds a layer of convenience: Redwood spreads `data` into `Success` so that you can just destructure whatever data you were expecting from your `QUERY` directly. + +So, if you're querying for `posts` and `authors`, instead of doing: + +```jsx +export const Success = ({ data }) => { + const { posts, authors } = data + + // ... +} +``` + +Redwood lets you do: + +```jsx +export const Success = ({ posts, authors }) => { + // ... +} +``` + +Note that you can still pass any other props to `Success`. After all, it's just a React component. + +:::tip + +Looking for info on how TypeScript works with Cells? Check out the [Utility Types](typescript/utility-types.md#cells) doc. + +::: + +### When should I use a Cell? + +Whenever you want to fetch data. Let Redwood juggle what's displayed when. You just focus on what those things should look like. + +While you can use a Cell whenever you want to fetch data, it's important to note that you don't have to. You can do anything you want! For example, for one-off queries, there's always `useApolloClient`. This hook returns the client, which you can use to execute queries, among other things: + +```jsx +// In a react component... + +client = useApolloClient() + +client.query({ + query: gql` + ... + ` +}) +``` + +### Can I Perform a Mutation in a Cell? + +Absolutely. We do so in our [example todo app](https://github.com/redwoodjs/example-todo/blob/f29069c9dc89fa3734c6f99816442e14dc73dbf7/web/src/components/TodoListCell/TodoListCell.js#L26-L44). +We also don't think it's an anti-pattern to do so. Far from it—your cells might end up containing a lot of logic and really serve as the hub of your app in many ways. + +It's also important to remember that, besides exporting certain things with certain names, there aren't many rules around Cells—everything you can do in a regular component still goes. + +## How Does Redwood Know a Cell is a Cell? + +You just have to end a filename in "Cell" right? 
Well, while that's basically correct, there is one other thing you should know. + +Redwood looks for all files ending in "Cell" (so if you want your component to be a Cell, its filename does have to end in "Cell"), but if the file 1) doesn't export a const named `QUERY` and 2) has a default export, then it'll be skipped. + +When would you want to do this? If you just want a file to end in "Cell" for some reason. Otherwise, don't worry about it! + +<!-- Source: https://github.com/redwoodjs/redwood/pull/597 --> +<!-- Source: https://github.com/redwoodjs/redwood/pull/554 --> +<!-- Code: https://github.com/redwoodjs/redwood/blob/60cb628d5f369d62607fa2f47c694d9a5c00540d/packages/core/config/babel-preset.js#L132-L136 --> +<!-- Code: https://github.com/redwoodjs/redwood/blob/60cb628d5f369d62607fa2f47c694d9a5c00540d/packages/core/src/babel-plugin-redwood-cell.ts#L58-L60 --> + +## Advanced Example: Implementing a Cell Yourself + +If we didn't do all that build-time stuff for you, how might you go about implementing a Cell yourself? + +Consider the [example from the Tutorial](tutorial/chapter2/cells.md#our-first-cell) where we're fetching posts: + +```jsx +export const QUERY = gql` + query { + posts { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>No posts yet!</div> + +export const Failure = ({ error }) => ( + <div>Error loading posts: {error.message}</div> +) + +export const Success = ({ posts }) => { + return posts.map((post) => ( + <article> + <h2>{post.title}</h2> + <div>{post.body}</div> + </article> + )) +} +``` + +And now let's say that Babel isn't going to come along and assemble our exports. What might we do? + +We'd probably do something like this: + +<!-- {35,39,44,47,49} --> +```jsx +const QUERY = gql` + query { + posts { + id + title + body + createdAt + } + } +` + +const Loading = () => <div>Loading...</div> + +const Empty = () => <div>No posts yet!</div> + +const Failure = ({ error }) => ( + <div>Error loading posts: {error.message}</div> +) + +const Success = ({ posts }) => { + return posts.map((post) => ( + <article> + <h2>{post.title}</h2> + <div>{post.body}</div> + </article> + )) +} + +const isEmpty = (data) => { + return isDataNull(data) || isDataEmptyArray(data) +} + +export const Cell = () => { + return ( + <Query query={QUERY}> + {({ error, loading, data }) => { + if (error) { + if (Failure) { + return <Failure error={error} /> + } else { + console.error(error) + } + } else if (loading) { + return <Loading /> + } else if (data) { + if (typeof Empty !== 'undefined' && isEmpty(data)) { + return <Empty /> + } else { + return <Success {...data} /> + } + } else { + throw 'Cannot render Cell: graphQL success but `data` is null' + } + }} + </Query> + ) +} +``` + +That's a lot of code. A lot of imperative code too. + +We're basically just dumping the contents of [createCell.tsx](https://github.com/redwoodjs/redwood/blob/main/packages/web/src/components/cell/createCell.tsx) into this file. Can you imagine having to do this every time you wanted to fetch data that might be delayed in responding? Yikes. 
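+
+One small gap in the hand-rolled sketch above: `isDataNull` and `isDataEmptyArray` aren't defined anywhere. If you really were writing a Cell by hand, you'd also need something like the following helpers. This is a minimal sketch of the default "no data" check described in the `isEmpty` section; the real `createCell.tsx` implements its own, more thorough version:
+
+```jsx
+// Illustrative helpers for the hand-rolled Cell above (not Redwood's actual implementation).
+// They mirror the default emptiness check: a Cell renders Empty when its root fields
+// are null or empty arrays.
+const isDataNull = (data) => {
+  return Object.values(data).every((field) => field === null)
+}
+
+const isDataEmptyArray = (data) => {
+  return Object.values(data).every(
+    (field) => Array.isArray(field) && field.length === 0
+  )
+}
+```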
diff --git a/docs/versioned_docs/version-7.0/cli-commands.md b/docs/versioned_docs/version-7.0/cli-commands.md new file mode 100644 index 000000000000..ca8e967aadbd --- /dev/null +++ b/docs/versioned_docs/version-7.0/cli-commands.md @@ -0,0 +1,2285 @@ +--- +description: A comprehensive reference of Redwood's CLI +--- + +# Command Line Interface + +The following is a comprehensive reference of the Redwood CLI. You can get a glimpse of all the commands by scrolling the aside to the right. + +The Redwood CLI has two entry-point commands: + +1. **redwood** (alias `rw`), which is for developing an application, and +2. **redwood-tools** (alias `rwt`), which is for contributing to the framework. + +This document covers the `redwood` command . For `redwood-tools`, see [Contributing](https://github.com/redwoodjs/redwood/blob/main/CONTRIBUTING.md#cli-reference-redwood-tools) in the Redwood repo. + +**A Quick Note on Syntax** + +We use [yargs](http://yargs.js.org/) and borrow its syntax here: + +``` +yarn redwood generate page <name> [path] --option +``` + +- `redwood g page` is the command. +- `<name>` and `[path]` are positional arguments. + - `<>` denotes a required argument. + - `[]` denotes an optional argument. +- `--option` is an option. + +Every argument and option has a type. Here `<name>` and `[path]` are strings and `--option` is a boolean. + +You'll also sometimes see arguments with trailing `..` like: + +``` +yarn redwood build [side..] +``` + +The `..` operator indicates that the argument accepts an array of values. See [Variadic Positional Arguments](https://github.com/yargs/yargs/blob/master/docs/advanced.md#variadic-positional-arguments). + +## create redwood-app + +Create a Redwood project using the yarn create command: + +``` +yarn create redwood-app <project directory> [option] +``` + +| Arguments & Options | Description | +| :--------------------- | :---------------------------------------------------------------------------------------------------------------------- | +| `project directory` | Specify the project directory [Required] | +| `--yarn-install` | Enables the yarn install step and version-requirement checks. You can pass `--no-yarn-install` to disable this behavior | +| `--typescript`, `--ts` | Generate a TypeScript project. JavaScript by default | +| `--overwrite` | Create the project even if the specified project directory isn't empty | +| `--no-telemetry` | Disable sending telemetry events for this create command and all Redwood CLI commands: https://telemetry.redwoodjs.com | +| `--yarn1` | Use yarn 1 instead of yarn 3 | +| `--git-init`, `--git` | Initialize a git repo during the install process, disabled by default | + +If you run into trouble during the yarn install step, which may happen if you're developing on an external drive and in other miscellaneous scenarios, try the `--yarn1` flag: + +``` +yarn create redwood-app my-redwood-project --yarn1 +``` + +## build + +Build for production. + +```bash +yarn redwood build [side..] +``` + +We use Babel to transpile the api side into `./api/dist` and Webpack to package the web side into `./web/dist`. + +| Arguments & Options | Description | +| :------------------ | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `side` | Which side(s) to build. Choices are `api` and `web`. 
Defaults to `api` and `web` | +| `--verbose, -v` | Print more information while building | + +#### Usage + +See [Builds](builds.md). + +#### Example + +Running `yarn redwood build` without any arguments generates the Prisma client and builds both sides of your project: + +```bash +~/redwood-app$ yarn redwood build +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood build + ✔ Generating the Prisma client... + ✔ Building "api"... + ✔ Building "web"... +Done in 17.37s. +``` + +Files are output to each side's `dist` directory: + +```plaintext {2,6} +├── api +│ ├── dist +│ ├── prisma +│ └── src +└── web + ├── dist + ├── public + └── src +``` + +## check (alias diagnostics) + +Get structural diagnostics for a Redwood project (experimental). + +``` +yarn redwood check +``` + +#### Example + +```bash +~/redwood-app$ yarn redwood check +yarn run v1.22.4 +web/src/Routes.js:14:5: error: You must specify a 'notfound' page +web/src/Routes.js:14:19: error: Duplicate Path +web/src/Routes.js:15:19: error: Duplicate Path +web/src/Routes.js:17:40: error: Page component not found +web/src/Routes.js:17:19: error (INVALID_ROUTE_PATH_SYNTAX): Error: Route path contains duplicate parameter: "/{id}/{id}" +``` + +## console (alias c) + +Launch an interactive Redwood shell (experimental): + +- This has not yet been tested on Windows. +- The Prisma Client must be generated _prior_ to running this command, e.g. `yarn redwood prisma generate`. This is a known issue. + +``` +yarn redwood console +``` + +Right now, you can only use the Redwood console to interact with your database (always with `await`): + +#### Example + +```bash +~/redwood-app$ yarn redwood console +yarn run v1.22.4 +> await db.user.findMany() +> [ { id: 1, email: 'tom@redwoodjs.com', name: 'Tom' } ] +``` + +## data-migrate + +Data migration tools. + +```bash +yarn redwood data-migrate <command> +``` + +| Command | Description | +| :-------- | :------------------------------------------------------------------------------------------ | +| `install` | Appends `DataMigration` model to `schema.prisma`, creates `api/db/dataMigrations` directory | +| `up` | Executes outstanding data migrations | + +### data-migrate install + +- Appends a `DataMigration` model to `schema.prisma` for tracking which data migrations have already run. +- Creates a DB migration using `yarn redwood prisma migrate dev --create-only create_data_migrations`. +- Creates `api/db/dataMigrations` directory to contain data migration scripts + +```bash +yarn redwood data-migrate install +``` + +### data-migrate up + +Executes outstanding data migrations against the database. Compares the list of files in `api/db/dataMigrations` to the records in the `DataMigration` table in the database and executes any files not present. + +If an error occurs during script execution, any remaining scripts are skipped and console output will let you know the error and how many subsequent scripts were skipped. + +```bash +yarn redwood data-migrate up +``` + +## dev + +Start development servers for api and web. + +```bash +yarn redwood dev [side..] +``` + +`yarn redwood dev api` starts the Redwood dev server and `yarn redwood dev web` starts the Webpack dev server with Redwood's config. + +| Argument | Description | +| :----------------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `side` | Which dev server(s) to start. Choices are `api` and `web`. 
Defaults to `api` and `web` | +| `--forward, --fwd` | String of one or more Vite Dev Server config options. See example usage below | + +#### Usage + +If you're only working on your sdl and services, you can run just the api server to get GraphQL Playground on port 8911: + +```bash +~/redwood-app$ yarn redwood dev api +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood dev api +$ /redwood-app/node_modules/.bin/dev-server +15:04:51 api | Listening on http://localhost:8911 +15:04:51 api | Watching /home/dominic/projects/redwood/redwood-app/api +15:04:51 api | +15:04:51 api | Now serving +15:04:51 api | +15:04:51 api | ► http://localhost:8911/graphql/ +``` + +Using `--forward` (alias `--fwd`), you can pass one or more Webpack Dev Server [config options](https://webpack.js.org/configuration/dev-server/). The following will run the dev server, set the port to `1234`, and disable automatic browser opening. + +```bash +~/redwood-app$ yarn redwood dev --fwd="--port=1234 --open=false" +``` + +You may need to access your dev application from a different host, like your mobile device or an SSH tunnel. To resolve the “Invalid Host Header” message, run the following: + +```bash +~/redwood-app$ yarn redwood dev --fwd="--allowed-hosts all" +``` + +For the full list of Webpack Dev Server settings, see [this documentation](https://webpack.js.org/configuration/dev-server/). + +For the full list of Server Configuration settings, see [this documentation](app-configuration-redwood-toml.md#api). + +## deploy + +Deploy your redwood project to a hosting provider target. + +**Netlify, Vercel, and Render** + +For hosting providers that auto deploy from Git, the deploy command runs the set of steps to build, apply production DB changes, and apply data migrations. In this context, it is often referred to as a Build Command. _Note: for Render, which uses traditional infrastructure, the command also starts Redwood's api server._ + +**AWS** + +This command runs the steps to both build your project _and_ deploy it to AWS. + +``` +yarn redwood deploy <target> +``` + +| Commands | Description | +| :---------------------------- | :--------------------------------------- | +| `serverless ` | Deploy to AWS using Serverless framework | +| `netlify [...commands]` | Build command for Netlify deploy | +| `render <side> [...commands]` | Build command for Render deploy | +| `vercel [...commands]` | Build command for Vercel deploy | + +### deploy serverless + +Deploy to AWS CloudFront and Lambda using [Serverless](https://www.serverless.com/) framework + +``` +yarn redwood deploy serverless +``` + +| Options & Arguments | Description | +| :------------------ | :------------------------------------------------------------------------------------------------------------------------------------------ | +| `--side` | which Side(s)to deploy [choices: "api", "web"] [default: "web","api"] | +| `--stage` | serverless stage, see [serverless stage docs](https://www.serverless.com/blog/stages-and-environments) [default: "production"] | +| `--pack-only` | Only package the build for deployment | +| `--first-run` | Use this flag the first time you deploy. 
The first deploy wizard will walk you through configuring your web side to connect to the api side | + + +### deploy netlify + +Build command for Netlify deploy + +``` +yarn redwood deploy netlify +``` + +| Options | Description | +| :--------------------- | :-------------------------------------------------- | +| `--build` | Build for production [default: "true"] | +| `--prisma` | Apply database migrations [default: "true"] | +| `--data-migrate, --dm` | Migrate the data in your database [default: "true"] | + +#### Example +The following command will build, apply Prisma DB migrations, and skip data migrations. + +``` +yarn redwood deploy netlify --no-data-migrate +``` + +:::warning +While you may be tempted to use the [Netlify CLI](https://cli.netlify.com) commands to [build](https://cli.netlify.com/commands/build) and [deploy](https://cli.netlify.com/commands/deploy) your project directly from you local project directory, doing so **will lead to errors when deploying and/or when running functions**. I.e. errors in the function needed for the GraphQL server, but also other serverless functions. + +The main reason for this is that these Netlify CLI commands simply build and deploy -- they build your project locally and then push the dist folder. That means that when building a RedwoodJS project, the [Prisma client is generated with binaries matching the operating system at build time](https://cli.netlify.com/commands/link) -- and not the [OS compatible](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#binarytargets-options) with running functions on Netlify. Your Prisma client engine may be `darwin` for OSX or `windows` for Windows, but it needs to be `debian-openssl-1.1.x` or `rhel-openssl-1.1.x`. If the client is incompatible, your functions will fail. + +Therefore, please follow the [instructions in the Tutorial](tutorial/chapter4/deployment.md#netlify) to sync your GitHub (or other compatible source control service) repository with Netlify and allow their build and deploy system to manage deployments. + +The [Netlify CLI](https://cli.netlify.com) still works well for [linking your project to your site](https://cli.netlify.com/commands/link), testing local builds and also using their [dev](https://cli.netlify.com/commands/dev) or [dev --live](https://cli.netlify.com/commands/dev) to share your local dev server via a tunnel. +::: + +### deploy render + +Build (web) and Start (api) command for Render deploy. (For usage instructions, see the Render [Deploy Redwood](https://render.com/docs/deploy-redwood) doc.) + +``` +yarn redwood deploy render <side> +``` + +| Options & Arguments | Description | +| :--------------------- | :-------------------------------------------------- | +| `side` | select side to build [choices: "api", "web"] | +| `--prisma` | Apply database migrations [default: "true"] | +| `--data-migrate, --dm` | Migrate the data in your database [default: "true"] | +| `--serve` | Run server for api in production [default: "true"] | + +#### Example +The following command will build the Web side for static-site CDN deployment. + +``` +yarn redwood deploy render web +``` + +The following command will apply Prisma DB migrations, run data migrations, and start the api server. 
+ +``` +yarn redwood deploy render api +``` + +### deploy vercel + +Build command for Vercel deploy + +``` +yarn redwood deploy vercel +``` + +| Options | Description | +| :--------------------- | :-------------------------------------------------- | +| `--build` | Build for production [default: "true"] | +| `--prisma` | Apply database migrations [default: "true"] | +| `--data-migrate, --dm` | Migrate the data in your database [default: "true"] | + +#### Example +The following command will build, apply Prisma DB migrations, and skip data migrations. + +``` +yarn redwood deploy vercel --no-data-migrate +``` + +## destroy (alias d) + +Rollback changes made by the generate command. + +``` +yarn redwood destroy <type> +``` + +| Command | Description | +| :------------------- | :------------------------------------------------------------------------------ | +| `cell <name>` | Destroy a cell component | +| `component <name>` | Destroy a component | +| `function <name>` | Destroy a Function | +| `layout <name>` | Destroy a layout component | +| `page <name> [path]` | Destroy a page and route component | +| `scaffold <model>` | Destroy pages, SDL, and Services files based on a given DB schema Model | +| `sdl <model>` | Destroy a GraphQL schema and service component based on a given DB schema Model | +| `service <name>` | Destroy a service component | +| `directive <name>` | Destroy a directive | + +## exec + +Execute scripts generated by [`yarn redwood generate script <name>`](#generate-script) to run one-off operations, long-running jobs, or utility scripts. + +#### Usage + +You can pass any flags to the command and use them within your script: + +``` +❯ yarn redwood exec syncStripeProducts foo --firstParam 'hello' --two 'world' + +[18:13:56] Generating Prisma client [started] +[18:13:57] Generating Prisma client [completed] +[18:13:57] Running script [started] +:: Executing script with args :: +{ _: [ 'exec', 'foo' ], firstParam: 'hello', two: 'world', '$0': 'rw' } +[18:13:58] Running script [completed] +✨ Done in 4.37s. +``` + +**Examples of CLI scripts:** + +- One-off scripts—such as syncing your Stripe products to your database +- A background worker you can off-load long running tasks +- Custom seed scripts for your application during development + +See [this how to](how-to/background-worker.md) for an example of using exec to run a background worker. + +## experimental (alias exp) + +Set up and run experimental features. + +Some caveats: +- these features do not follow SemVer (may be breaking changes in minor and patch releases) +- these features may be deprecated or removed (anytime) +- your feedback is wanted and necessary! + +For more information, including details about specific features, see this Redwood Forum category: +[Experimental Features](https://community.redwoodjs.com/c/experimental-features/25) + +**Available Experimental Features** +View all that can be _set up_: +``` +yarn redwood experimental --help +``` + +## generate (alias g) + +Save time by generating boilerplate code. + +``` +yarn redwood generate <type> +``` + +Some generators require that their argument be a model in your `schema.prisma`. When they do, their argument is named `<model>`. 
+ +| Command | Description | +| ---------------------- | ----------------------------------------------------------------------------------------------------- | +| `cell <name>` | Generate a cell component | +| `component <name>` | Generate a component component | +| `dataMigration <name>` | Generate a data migration component | +| `dbAuth` | Generate sign in, sign up and password reset pages for dbAuth | +| `deploy <provider>` | Generate a deployment configuration | +| `function <name>` | Generate a Function | +| `layout <name>` | Generate a layout component | +| `page <name> [path]` | Generate a page component | +| `scaffold <model>` | Generate Pages, SDL, and Services files based on a given DB schema Model. Also accepts `<path/model>` | +| `sdl <model>` | Generate a GraphQL schema and service object | +| `secret` | Generate a secret key using a cryptographically-secure source of entropy | +| `service <name>` | Generate a service component | +| `types` | Generate types and supplementary code | +| `script <name>` | Generate a script that can use your services/libs to execute with `redwood exec script <name>` | + +### TypeScript generators + +If your project is configured for TypeScript (see the [TypeScript docs](typescript/index)), the generators will automatically detect and generate `.ts`/`.tsx` files for you + +**Undoing a Generator with a Destroyer** + +Most generate commands (i.e., everything but `yarn redwood generate dataMigration`) can be undone by their corresponding destroy command. For example, `yarn redwood generate cell` can be undone with `yarn redwood destroy cell`. + +### generate cell + +Generate a cell component. + +```bash +yarn redwood generate cell <name> +``` + +Cells are signature to Redwood. We think they provide a simpler and more declarative approach to data fetching. + +| Arguments & Options | Description | +| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `name` | Name of the cell | +| `--force, -f` | Overwrite existing files | +| `--typescript, --ts` | Generate TypeScript files Enabled by default if we detect your project is TypeScript | +| `--query` | Use this flag to specify a specific name for the GraphQL query. The query name must be unique | +| `--list` | Use this flag to generate a list cell. This flag is needed when dealing with irregular words whose plural and singular is identical such as equipment or pokemon | +| `--tests` | Generate test files [default: true] | +| `--stories` | Generate Storybook files [default: true] | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +#### Usage + +The cell generator supports both single items and lists. See the [Single Item Cell vs List Cell](cells.md#single-item-cell-vs-list-cell) section of the Cell documentation. + +See the [Cells](tutorial/chapter2/cells.md) section of the Tutorial for usage examples. + +**Destroying** + +``` +yarn redwood destroy cell <name> +``` + +#### Example + +Generating a user cell: + +```bash +~/redwood-app$ yarn redwood generate cell user +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g cell user + ✔ Generating cell files... + ✔ Writing `./web/src/components/UserCell/UserCell.test.js`... + ✔ Writing `./web/src/components/UserCell/UserCell.js`... +Done in 1.00s. 
+``` + +A cell defines and exports four constants: `QUERY`, `Loading`, `Empty`, `Failure`, and `Success`: + +```jsx title="./web/src/components/UserCell/UserCell.js" +export const QUERY = gql` + query { + user { + id + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => <div>Error: {error.message}</div> + +export const Success = ({ user }) => { + return JSON.stringify(user) +} +``` + +### generate component + +Generate a component. + +```bash +yarn redwood generate component <name> +``` + +Redwood loves function components and makes extensive use of React Hooks, which are only enabled in function components. + +| Arguments & Options | Description | +| -------------------- | ------------------------------------------------------------------------------------ | +| `name` | Name of the component | +| `--force, -f` | Overwrite existing files | +| `--typescript, --ts` | Generate TypeScript files Enabled by default if we detect your project is TypeScript | +| `--tests` | Generate test files [default: true] | +| `--stories` | Generate Storybook files [default: true] | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +**Destroying** + +``` +yarn redwood destroy component <name> +``` + +#### Example + +Generating a user component: + +```bash +~/redwood-app$ yarn redwood generate component user +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g component user + ✔ Generating component files... + ✔ Writing `./web/src/components/User/User.test.js`... + ✔ Writing `./web/src/components/User/User.js`... +Done in 1.02s. +``` + +The component will export some jsx telling you where to find it. + +```jsx title="./web/src/components/User/User.js" +const User = () => { + return ( + <div> + <h2>{'User'}</h2> + <p>{'Find me in ./web/src/components/User/User.js'}</p> + </div> + ) +} + +export default User +``` + +### generate dataMigration + +Generate a data migration script. + +``` +yarn redwood generate dataMigration <name> +``` + +Creates a data migration script in `api/db/dataMigrations`. + +| Arguments & Options | Description | +| :------------------ | :----------------------------------------------------------------------- | +| `name` | Name of the data migration, prefixed with a timestamp at generation time | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +#### Usage + +See the [Data Migration](data-migrations.md) docs. + +#### Usage + +See the [Deploy](/docs/deploy/introduction) docs. + +### generate dbAuth + +Generate log in, sign up, forgot password and password reset pages for dbAuth + +``` +yarn redwood generate dbAuth +``` + +| Arguments & Options | Description | +| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------ | +| `--username-label` | The label to give the username field on the auth forms, e.g. "Email". Defaults to "Username". If not specified you will be prompted | +| `--password-label` | The label to give the password field on the auth forms, e.g. "Secret". Defaults to "Password". If not specified you will be prompted | +| `--webAuthn` | Whether or not to add webAuthn support to the log in page. 
If not specified you will be prompted | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +If you don't want to create your own log in, sign up, forgot password and +password reset pages from scratch you can use this generator. The pages will be +available at /login, /signup, /forgot-password, and /reset-password. Check the +post-install instructions for one change you need to make to those pages: where +to redirect the user to once their log in/sign up is successful. + +If you'd rather create your own, you might want to start from the generated +pages anyway as they'll contain the other code you need to actually submit the +log in credentials or sign up fields to the server for processing. + +### generate directive + +Generate a directive. + +```bash +yarn redwood generate directive <name> +``` + +| Arguments & Options | Description | +| -------------------- | --------------------------------------------------------------------- | +| `name` | Name of the directive | +| `--force, -f` | Overwrite existing files | +| `--typescript, --ts` | Generate TypeScript files (defaults to your projects language target) | +| `--type` | Directive type [Choices: "validator", "transformer"] | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +#### Usage + +See [Redwood Directives](directives.md). + +**Destroying** + +``` +yarn redwood destroy directive <name> +``` + +#### Example + +Generating a `myDirective` directive using the interactive command: + +```bash +yarn rw g directive myDirective + +? What type of directive would you like to generate? › - Use arrow-keys. Return to submit. +❯ Validator - Implement a validation: throw an error if criteria not met to stop execution + Transformer - Modify values of fields or query responses +``` + +### generate function + +Generate a Function. + +``` +yarn redwood generate function <name> +``` + +Not to be confused with Javascript functions, Capital-F Functions are meant to be deployed to serverless endpoints like AWS Lambda. + +| Arguments & Options | Description | +| -------------------- | ------------------------------------------------------------------------------------ | +| `name` | Name of the function | +| `--force, -f` | Overwrite existing files | +| `--typescript, --ts` | Generate TypeScript files Enabled by default if we detect your project is TypeScript | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +#### Usage + +See the [Custom Function](how-to/custom-function.md) how to. + +**Destroying** + +``` +yarn redwood destroy function <name> +``` + +#### Example + +Generating a user function: + +```bash +~/redwood-app$ yarn redwood generate function user +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g function user + ✔ Generating function files... + ✔ Writing `./api/src/functions/user.js`... +Done in 16.04s. 
+``` + +Functions get passed `context` which provides access to things like the current user: + +```jsx title="./api/src/functions/user.js" +export const handler = async (event, context) => { + return { + statusCode: 200, + body: `user function`, + } +} +``` + +Now if we run `yarn redwood dev api`: + +```plaintext {11} +~/redwood-app$ yarn redwood dev api +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood dev api +$ /redwood-app/node_modules/.bin/dev-server +17:21:49 api | Listening on http://localhost:8911 +17:21:49 api | Watching /home/dominic/projects/redwood/redwood-app/api +17:21:49 api | +17:21:49 api | Now serving +17:21:49 api | +17:21:49 api | ► http://localhost:8911/graphql/ +17:21:49 api | ► http://localhost:8911/user/ +``` + +### generate layout + +Generate a layout component. + +```bash +yarn redwood generate layout <name> +``` + +Layouts wrap pages and help you stay DRY. + +| Arguments & Options | Description | +| -------------------- | ------------------------------------------------------------------------------------ | +| `name` | Name of the layout | +| `--force, -f` | Overwrite existing files | +| `--typescript, --ts` | Generate TypeScript files Enabled by default if we detect your project is TypeScript | +| `--tests` | Generate test files [default: true] | +| `--stories` | Generate Storybook files [default: true] | +| `--skipLink` | Generate a layout with a skip link [default: false] | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +#### Usage + +See the [Layouts](tutorial/chapter1/layouts.md) section of the tutorial. + +**Destroying** + +``` +yarn redwood destroy layout <name> +``` + +#### Example + +Generating a user layout: + +```bash +~/redwood-app$ yarn redwood generate layout user +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g layout user + ✔ Generating layout files... + ✔ Writing `./web/src/layouts/UserLayout/UserLayout.test.js`... + ✔ Writing `./web/src/layouts/UserLayout/UserLayout.js`... +Done in 1.00s. +``` + +A layout will just export its children: + +```jsx title="./web/src/layouts/UserLayout/UserLayout.test.js" +const UserLayout = ({ children }) => { + return <>{children}</> +} + +export default UserLayout +``` + +### generate model + +Generate a RedwoodRecord model. + +```bash +yarn redwood generate model <name> +``` + +| Arguments & Options | Description | +| ------------------- | --------------------------------------------------- | +| `name` | Name of the model (in schema.prisma) | +| `--force, -f` | Overwrite existing files | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +#### Usage + +See the [RedwoodRecord docs](redwoodrecord.md). + +#### Example + +```bash +~/redwood-app$ yarn redwood generate model User +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g model User + ✔ Generating model file... + ✔ Successfully wrote file `./api/src/models/User.js` + ✔ Parsing datamodel, generating api/src/models/index.js... + + Wrote /Users/rob/Sites/redwoodjs/redwood_record/.redwood/datamodel.json + Wrote /Users/rob/Sites/redwoodjs/redwood_record/api/src/models/index.js + +✨ Done in 3.74s. +``` + +Generating a model automatically runs `yarn rw record init` as well. + +### generate page + +Generates a page component and updates the routes. + +```bash +yarn redwood generate page <name> [path] +``` + +`path` can include a route parameter which will be passed to the generated +page. The syntax for that is `/path/to/page/{routeParam}/more/path`. 
You can +also specify the type of the route parameter if needed: `{routeParam:Int}`. If +`path` isn't specified, or if it's just a route parameter, it will be derived +from `name` and the route parameter, if specified, will be added to the end. + +This also updates `Routes.js` in `./web/src`. + +| Arguments & Options | Description | +| -------------------- | ------------------------------------------------------------------------------------ | +| `name` | Name of the page | +| `path` | URL path to the page. Defaults to `name` | +| `--force, -f` | Overwrite existing files | +| `--typescript, --ts` | Generate TypeScript files Enabled by default if we detect your project is TypeScript | +| `--tests` | Generate test files [default: true] | +| `--stories` | Generate Storybook files [default: true] | +| `--rollback` | Rollback changes if an error occurs [default: true] | + +**Destroying** + +``` +yarn redwood destroy page <name> [path] +``` + +**Examples** + +Generating a home page: + +```plaintext +~/redwood-app$ yarn redwood generate page home / +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g page home / + ✔ Generating page files... + ✔ Writing `./web/src/pages/HomePage/HomePage.test.js`... + ✔ Writing `./web/src/pages/HomePage/HomePage.js`... + ✔ Updating routes file... +Done in 1.02s. +``` + +The page returns jsx telling you where to find it: + +```jsx title="./web/src/pages/HomePage/HomePage.js" +const HomePage = () => { + return ( + <div> + <h1>HomePage</h1> + <p>Find me in ./web/src/pages/HomePage/HomePage.js</p> + </div> + ) +} + +export default HomePage +``` + +And the route is added to `Routes.js`: + +```jsx {6} title="./web/src/Routes.js" +const Routes = () => { + return ( + <Router> + <Route path="/" page={HomePage} name="home" /> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} +``` + +Generating a page to show quotes: + +```plaintext +~/redwood-app$ yarn redwood generate page quote {id} +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g page quote {id} + ✔ Generating page files... + ✔ Writing `./web/src/pages/QuotePage/QuotePage.stories.js`... + ✔ Writing `./web/src/pages/QuotePage/QuotePage.test.js`... + ✔ Writing `./web/src/pages/QuotePage/QuotePage.js`... + ✔ Updating routes file... +Done in 1.02s. +``` + +The generated page will get the route parameter as a prop: + +```jsx {5,12,14} title="./web/src/pages/QuotePage/QuotePage.js" +import { Link, routes } from '@redwoodjs/router' + +const QuotePage = ({ id }) => { + return ( + <> + <h1>QuotePage</h1> + <p>Find me in "./web/src/pages/QuotePage/QuotePage.js"</p> + <p> + My default route is named "quote", link to me with ` + <Link to={routes.quote({ id: 42 })}>Quote 42</Link>` + </p> + <p>The parameter passed to me is {id}</p> + </> + ) +} + +export default QuotePage +``` + +And the route is added to `Routes.js`, with the route parameter added: + +```jsx {6} title="./web/src/Routes.js" +const Routes = () => { + return ( + <Router> + <Route path="/quote/{id}" page={QuotePage} name="quote" /> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} +``` +### generate realtime + +Generate a boilerplate subscription or live query used with RedwoodJS Realtime. 
+
+```bash
+yarn redwood generate realtime <name>
+```
+
+| Arguments & Options | Description                                                                                      |
+| ------------------- | --------------------------------------------------------------------------------------------------- |
+| `name`              | Name of the realtime event to set up                                                             |
+| `-t, --type`        | Choices: `liveQuery`, `subscription`. Optional. If not provided, you will be prompted to select  |
+| `--force, -f`       | Overwrite existing files                                                                         |
+
+#### Usage
+
+See Realtime for more information on how to [set up RedwoodJS Realtime](#setup-realtime) and use Live Queries and Subscriptions.
+
+**Examples**
+
+Generate a live query.
+
+```bash
+~/redwood-app$ yarn rw g realtime NewLiveQuery
+? What type of realtime event would you like to create? › - Use arrow-keys. Return to submit.
+❯ Live Query - Create a Live Query to watch for changes in data
+  Subscription
+
+✔ What type of realtime event would you like to create? › Live Query
+✔ Checking for realtime environment prerequisites ...
+✔ Adding newlivequery example live query ...
+✔ Generating types ...
+```
+
+Generate a subscription.
+
+```bash
+~/redwood-app$ yarn rw g realtime NewSub
+? What type of realtime event would you like to create? › - Use arrow-keys. Return to submit.
+  Live Query
+❯ Subscription - Create a Subscription to watch for events
+
+✔ What type of realtime event would you like to create? › Subscription
+✔ Checking for realtime environment prerequisites ...
+✔ Adding newsub example subscription ...
+✔ Generating types ...
+```
+
+### generate scaffold
+
+Generate Pages, SDL, and Services files based on a given DB schema Model. Also accepts `<path/model>`.
+
+```bash
+yarn redwood generate scaffold <model>
+```
+
+A scaffold quickly creates a CRUD for a model by generating the following files and corresponding routes:
+
+- sdl
+- service
+- layout
+- pages
+- cells
+- components
+
+The content of the generated components is different from what you'd get by running them individually.
+
+| Arguments & Options  | Description                                                                                                                                                                                          |
+| -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `model`              | Model to scaffold. You can also use `<path/model>` to nest files by type at the given path directory (or directories). For example, `redwood g scaffold admin/post`                                 |
+| `--docs`             | Use or set to `true` to generate comments in SDL to use in self-documenting your app's GraphQL API. See: [Self-Documenting GraphQL API](./graphql.md#self-documenting-graphql-api) [default: false] |
+| `--force, -f`        | Overwrite existing files                                                                                                                                                                             |
+| `--tailwind`         | Generate TailwindCSS version of scaffold.css (automatically set to `true` if TailwindCSS config exists)                                                                                              |
+| `--typescript, --ts` | Generate TypeScript files. Enabled by default if we detect your project is TypeScript                                                                                                                |
+| `--rollback`         | Rollback changes if an error occurs [default: true]                                                                                                                                                  |
+
+#### Usage
+
+See [Creating a Post Editor](tutorial/chapter2/getting-dynamic.md#creating-a-post-editor).
+
+**Nesting of Components and Pages**
+
+By default, Redwood will nest the components and pages in a directory named after the model. For example (where `post` is the model):
+`yarn rw g scaffold post`
+will output the following files, with the components and pages nested in a `Post` directory:
+
+```plaintext {9-20}
+ √ Generating scaffold files...
+ √ Successfully wrote file `./api/src/graphql/posts.sdl.js` + √ Successfully wrote file `./api/src/services/posts/posts.js` + √ Successfully wrote file `./api/src/services/posts/posts.scenarios.js` + √ Successfully wrote file `./api/src/services/posts/posts.test.js` + √ Successfully wrote file `./web/src/layouts/PostsLayout/PostsLayout.js` + √ Successfully wrote file `./web/src/pages/Post/EditPostPage/EditPostPage.js` + √ Successfully wrote file `./web/src/pages/Post/PostPage/PostPage.js` + √ Successfully wrote file `./web/src/pages/Post/PostsPage/PostsPage.js` + √ Successfully wrote file `./web/src/pages/Post/NewPostPage/NewPostPage.js` + √ Successfully wrote file `./web/src/components/Post/EditPostCell/EditPostCell.js` + √ Successfully wrote file `./web/src/components/Post/Post/Post.js` + √ Successfully wrote file `./web/src/components/Post/PostCell/PostCell.js` + √ Successfully wrote file `./web/src/components/Post/PostForm/PostForm.js` + √ Successfully wrote file `./web/src/components/Post/Posts/Posts.js` + √ Successfully wrote file `./web/src/components/Post/PostsCell/PostsCell.js` + √ Successfully wrote file `./web/src/components/Post/NewPost/NewPost.js` + √ Adding layout import... + √ Adding set import... + √ Adding scaffold routes... + √ Adding scaffold asset imports... +``` + +If it is not desired to nest the components and pages, then redwood provides an option that you can set to disable this for your project. +Add the following in your `redwood.toml` file to disable the nesting of components and pages. + +``` +[generate] + nestScaffoldByModel = false +``` + +Setting the `nestScaffoldByModel = true` will retain the default behavior, but is not required. + +Notes: + +1. The nesting directory is always set to be PascalCase. + +**Namespacing Scaffolds** + +You can namespace your scaffolds by providing `<path/model>`. The layout, pages, cells, and components will be nested in newly created dir(s). In addition, the nesting folder, based upon the model name, is still applied after the path for components and pages, unless turned off in the `redwood.toml` as described above. For example, given a model `user`, running `yarn redwood generate scaffold admin/user` will nest the layout, pages, and components in a newly created `Admin` directory created for each of the `layouts`, `pages`, and `components` folders: + +```plaintext {9-20} +~/redwood-app$ yarn redwood generate scaffold admin/user +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g scaffold admin/user + ✔ Generating scaffold files... 
+ ✔ Successfully wrote file `./api/src/graphql/users.sdl.js` + ✔ Successfully wrote file `./api/src/services/users/users.js` + ✔ Successfully wrote file `./api/src/services/users/users.scenarios.js` + ✔ Successfully wrote file `./api/src/services/users/users.test.js` + ✔ Successfully wrote file `./web/src/layouts/Admin/UsersLayout/UsersLayout.js` + ✔ Successfully wrote file `./web/src/pages/Admin/User/EditUserPage/EditUserPage.js` + ✔ Successfully wrote file `./web/src/pages/Admin/User/UserPage/UserPage.js` + ✔ Successfully wrote file `./web/src/pages/Admin/User/UsersPage/UsersPage.js` + ✔ Successfully wrote file `./web/src/pages/Admin/User/NewUserPage/NewUserPage.js` + ✔ Successfully wrote file `./web/src/components/Admin/User/EditUserCell/EditUserCell.js` + ✔ Successfully wrote file `./web/src/components/Admin/User/User/User.js` + ✔ Successfully wrote file `./web/src/components/Admin/User/UserCell/UserCell.js` + ✔ Successfully wrote file `./web/src/components/Admin/User/UserForm/UserForm.js` + ✔ Successfully wrote file `./web/src/components/Admin/User/Users/Users.js` + ✔ Successfully wrote file `./web/src/components/Admin/User/UsersCell/UsersCell.js` + ✔ Successfully wrote file `./web/src/components/Admin/User/NewUser/NewUser.js` + ✔ Adding layout import... + ✔ Adding set import... + ✔ Adding scaffold routes... + ✔ Adding scaffold asset imports... +Done in 1.21s. +``` + +The routes wrapped in the [`Set`](router.md#sets-of-routes) component with generated layout will be nested too: + +```jsx {6-11} title="./web/src/Routes.js" +const Routes = () => { + return ( + <Router> + <Set wrap={UsersLayout}> + <Route path="/admin/users/new" page={AdminUserNewUserPage} name="adminNewUser" /> + <Route path="/admin/users/{id:Int}/edit" page={AdminUserEditUserPage} name="adminEditUser" /> + <Route path="/admin/users/{id:Int}" page={AdminUserUserPage} name="adminUser" /> + <Route path="/admin/users" page={AdminUserUsersPage} name="adminUsers" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} +``` + +Notes: + +1. Each directory in the scaffolded path is always set to be PascalCase. +2. The scaffold path may be multiple directories deep. + +**Destroying** + +``` +yarn redwood destroy scaffold <model> +``` + +Notes: + +1. You can also use `<path/model>` to destroy files that were generated under a scaffold path. For example, `redwood d scaffold admin/post` +2. The destroy command will remove empty folders along the path, provided they are lower than the folder level of component, layout, page, etc. +3. The destroy scaffold command will also follow the `nestScaffoldbyModel` setting in the `redwood.toml` file. For example, if you have an existing scaffold that you wish to destroy, that does not have the pages and components nested by the model name, you can destroy the scaffold by temporarily setting: + +``` +[generate] + nestScaffoldByModel = false +``` + +**Troubleshooting** + +If you see `Error: Unknown type: ...`, don't panic! +It's a known limitation with GraphQL type generation. +It happens when you generate the SDL of a Prisma model that has relations **before the SDL for the related model exists**. +Please see [Troubleshooting Generators](./schema-relations#troubleshooting-generators) for help. + +### generate script + +Generates an arbitrary Node.js script in `./scripts/<name>` that can be used with `redwood execute` command later. 
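+
+For a sense of what you end up writing, a script is just a file in `./scripts/` with a default export. The sketch below is hypothetical (the name, the `user.count()` query, and the logging are made up for illustration); it assumes the `api/src/lib/db` import style used elsewhere in these docs and that `yarn rw exec` passes your script an `args` object:
+
+```jsx title="./scripts/countUsers.js"
+import { db } from 'api/src/lib/db'
+
+// Run with: yarn rw exec countUsers --verbose true
+export default async ({ args }) => {
+  // Scripts can use the project's Prisma client just like services do
+  const count = await db.user.count()
+
+  console.log(`There are currently ${count} users`)
+
+  if (args.verbose) {
+    console.log('Raw args:', args)
+  }
+}
+```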
+
+| Arguments & Options  | Description                                                                            |
+| -------------------- | -------------------------------------------------------------------------------------- |
+| `name`               | Name of the script                                                                     |
+| `--typescript, --ts` | Generate TypeScript files. Enabled by default if we detect your project is TypeScript  |
+| `--rollback`         | Rollback changes if an error occurs [default: true]                                    |
+
+Scripts have access to services and libraries used in your project. Some examples of how this can be useful:
+
+- create special database seed scripts for different scenarios
+- sync products and prices from your payment provider
+- run cleanup jobs on a regular basis, e.g. delete stale/expired data
+- sync data between platforms, e.g. email from your db to your email marketing platform
+
+#### Usage
+
+```
+❯ yarn rw g script syncStripeProducts
+
+ ✔ Generating script file...
+ ✔ Successfully wrote file `./scripts/syncStripeProducts.ts`
+ ✔ Next steps...
+
+ After modifying your script, you can invoke it like:
+
+ yarn rw exec syncStripeProducts
+
+ yarn rw exec syncStripeProducts --param1 true
+```
+
+### generate sdl
+
+Generate a GraphQL schema and service object.
+
+```bash
+yarn redwood generate sdl <model>
+```
+
+The sdl will inspect your `schema.prisma` and will do its best with relations. Schema to generators isn't one-to-one yet (and might never be).
+
+<!-- See limited generator support for relations
+https://community.redwoodjs.com/t/prisma-beta-2-and-redwoodjs-limited-generator-support-for-relations-with-workarounds/361 -->
+
+| Arguments & Options  | Description                                                                                                                                                                                          |
+| -------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `model`              | Model to generate the sdl for                                                                                                                                                                        |
+| `--crud`             | Set to `false`, or use `--no-crud`, if you do not want to generate mutations                                                                                                                         |
+| `--docs`             | Use or set to `true` to generate comments in SDL to use in self-documenting your app's GraphQL API. See: [Self-Documenting GraphQL API](./graphql.md#self-documenting-graphql-api) [default: false] |
+| `--force, -f`        | Overwrite existing files                                                                                                                                                                             |
+| `--tests`            | Generate service test and scenario [default: true]                                                                                                                                                   |
+| `--typescript, --ts` | Generate TypeScript files. Enabled by default if we detect your project is TypeScript                                                                                                                |
+| `--rollback`         | Rollback changes if an error occurs [default: true]                                                                                                                                                  |
+
+> **Note:** The generated sdl will include the `@requireAuth` directive by default to ensure queries and mutations are secure. If your app's queries and mutations are all public, you can set up a custom SDL generator template to apply `@skipAuth` (or a custom validator directive) to suit your application's needs.
+
+**Regenerating the SDL**
+
+Often, as you iterate on your data model, you may add, remove, or rename fields. You still want Redwood to update the generated SDL and service files for those updates because it saves time not having to make those changes manually.
+
+But, since the `generate` command prevents you from accidentally overwriting files, you have to use the `--force` option. A force, however, will also reset any tests and scenarios you may have written, which you don't want to lose.
+
+In that case, you can run the following to "regenerate" **just** the SDL file, leaving your tests and scenarios intact so you don't lose your hard work.
+
+```
+yarn redwood g sdl <model> --force --no-tests
+```
+
+#### Example
+
+```bash
+~/redwood-app$ yarn redwood generate sdl user --force --no-tests
+yarn run v1.22.4
+$ /redwood-app/node_modules/.bin/redwood g sdl user
+ ✔ Generating SDL files...
+ ✔ Writing `./api/src/graphql/users.sdl.js`...
+ ✔ Writing `./api/src/services/users/users.js`...
+Done in 1.04s.
+```
+
+**Destroying**
+
+```
+yarn redwood destroy sdl <model>
+```
+
+#### Example
+
+Generating a user sdl:
+
+```bash
+~/redwood-app$ yarn redwood generate sdl user
+yarn run v1.22.4
+$ /redwood-app/node_modules/.bin/redwood g sdl user
+ ✔ Generating SDL files...
+ ✔ Writing `./api/src/graphql/users.sdl.js`...
+ ✔ Writing `./api/src/services/users/users.scenarios.js`...
+ ✔ Writing `./api/src/services/users/users.test.js`...
+ ✔ Writing `./api/src/services/users/users.js`...
+Done in 1.04s.
+```
+
+The generated sdl defines a corresponding type, query, create/update inputs, and any mutations. To prevent defining mutations, add the `--no-crud` option.
+
+```jsx title="./api/src/graphql/users.sdl.js"
+export const schema = gql`
+  type User {
+    id: Int!
+    email: String!
+    name: String
+  }
+
+  type Query {
+    users: [User!]! @requireAuth
+  }
+
+  input CreateUserInput {
+    email: String!
+    name: String
+  }
+
+  input UpdateUserInput {
+    email: String
+    name: String
+  }
+
+  type Mutation {
+    createUser(input: CreateUserInput!): User! @requireAuth
+    updateUser(id: Int!, input: UpdateUserInput!): User! @requireAuth
+    deleteUser(id: Int!): User! @requireAuth
+  }
+`
+```
+
+The services file fulfills the query. If the `--no-crud` option is added, this file will be less complex.
+
+```jsx title="./api/src/services/users/users.js"
+import { db } from 'src/lib/db'
+
+export const users = () => {
+  return db.user.findMany()
+}
+```
+
+For a model with a relation, the field will be listed in the sdl:
+
+```jsx {8} title="./api/src/graphql/users.sdl.js"
+export const schema = gql`
+  type User {
+    id: Int!
+    email: String!
+    name: String
+    profile: Profile
+  }
+
+  type Query {
+    users: [User!]! @requireAuth
+  }
+
+  input CreateUserInput {
+    email: String!
+    name: String
+  }
+
+  input UpdateUserInput {
+    email: String
+    name: String
+  }
+
+  type Mutation {
+    createUser(input: CreateUserInput!): User! @requireAuth
+    updateUser(id: Int!, input: UpdateUserInput!): User! @requireAuth
+    deleteUser(id: Int!): User! @requireAuth
+  }
+`
+```
+
+And the service will export an object with the relation as a property:
+
+```jsx {7-11} title="./api/src/services/users/users.js"
+import { db } from 'src/lib/db'
+
+export const users = () => {
+  return db.user.findMany()
+}
+
+// Relation resolver: returns the profile for the user being resolved
+export const User = {
+  profile: (_obj, { root }) =>
+    db.user.findUnique({ where: { id: root.id } }).profile(),
+}
+```
+
+**Troubleshooting**
+
+If you see `Error: Unknown type: ...`, don't panic!
+It's a known limitation with GraphQL type generation.
+It happens when you generate the SDL of a Prisma model that has relations **before the SDL for the related model exists**.
+Please see [Troubleshooting Generators](./schema-relations#troubleshooting-generators) for help.
+
+### generate secret
+
+Generate a secret key using a cryptographically-secure source of entropy. Commonly used when setting up dbAuth.
+ +| Arguments & Options | Description | +| :------------------ | :------------------------------------------------- | +| `--raw` | Print just the key, without any informational text | + +#### Usage + +Using the `--raw` option you can easily append a secret key to your .env file, like so: + +``` +# yarn v1 +echo "SESSION_SECRET=$(yarn --silent rw g secret --raw)" >> .env + +# yarn v3 +echo "SESSION_SECRET=$(yarn rw g secret --raw)" >> .env +``` + +### generate service + +Generate a service component. + +```bash +yarn redwood generate service <name> +``` + +Services are where Redwood puts its business logic. They can be used by your GraphQL API or any other place in your backend code. See [How Redwood Works with Data](tutorial/chapter2/side-quest.md). + +| Arguments & Options | Description | +| -------------------- | ------------------------------------------------------------------------------------ | +| `name` | Name of the service | +| `--force, -f` | Overwrite existing files | +| `--typescript, --ts` | Generate TypeScript files Enabled by default if we detect your project is TypeScript | +| `--tests` | Generate test and scenario files [default: true] | +| `--rollback` | Rollback changes if an error occurs [default: true] | + + +**Destroying** + +``` +yarn redwood destroy service <name> +``` + +#### Example + +Generating a user service: + +```bash +~/redwood-app$ yarn redwood generate service user +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood g service user + ✔ Generating service files... + ✔ Writing `./api/src/services/users/users.scenarios.js`... + ✔ Writing `./api/src/services/users/users.test.js`... + ✔ Writing `./api/src/services/users/users.js`... +Done in 1.02s. +``` + +The generated service component will export a `findMany` query: + +```jsx title="./api/src/services/users/users.js" +import { db } from 'src/lib/db' + +export const users = () => { + return db.user.findMany() +} +``` + +### generate types + +Generates supplementary code (project types) + +```bash +yarn redwood generate types +``` + +#### Usage + +``` +~/redwood-app$ yarn redwood generate types +yarn run v1.22.10 +$ /redwood-app/node_modules/.bin/redwood g types +$ /redwood-app/node_modules/.bin/rw-gen + +Generating... + +- .redwood/schema.graphql +- .redwood/types/mirror/api/src/services/posts/index.d.ts +- .redwood/types/mirror/web/src/components/BlogPost/index.d.ts +- .redwood/types/mirror/web/src/layouts/BlogLayout/index.d.ts +... +- .redwood/types/mirror/web/src/components/Post/PostsCell/index.d.ts +- .redwood/types/includes/web-routesPages.d.ts +- .redwood/types/includes/all-currentUser.d.ts +- .redwood/types/includes/web-routerRoutes.d.ts +- .redwood/types/includes/api-globImports.d.ts +- .redwood/types/includes/api-globalContext.d.ts +- .redwood/types/includes/api-scenarios.d.ts +- api/types/graphql.d.ts +- web/types/graphql.d.ts + +... and done. +``` + +## info + +Print your system environment information. 
+ +```bash +yarn redwood info +``` + +This command's primarily intended for getting information others might need to know to help you debug: + +```bash +~/redwood-app$ yarn redwood info +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/redwood info + + System: + OS: Linux 5.4 Ubuntu 20.04 LTS (Focal Fossa) + Shell: 5.0.16 - /usr/bin/bash + Binaries: + Node: 13.12.0 - /tmp/yarn--1589998865777-0.9683603763419713/node + Yarn: 1.22.4 - /tmp/yarn--1589998865777-0.9683603763419713/yarn + Browsers: + Chrome: 78.0.3904.108 + Firefox: 76.0.1 + npmPackages: + @redwoodjs/core: ^0.7.0-rc.3 => 0.7.0-rc.3 + +Done in 1.98s. +``` + +## lint + +Lint your files. + +```bash +yarn redwood lint +``` + +[Our ESLint configuration](https://github.com/redwoodjs/redwood/blob/master/packages/eslint-config/index.js) is a mix of [ESLint's recommended rules](https://eslint.org/docs/rules/), [React's recommended rules](https://www.npmjs.com/package/eslint-plugin-react#list-of-supported-rules), and a bit of our own stylistic flair: + +- no semicolons +- comma dangle when multiline +- single quotes +- always use parenthesis around arrow functions +- enforced import sorting + +| Option | Description | +| :------ | :---------------- | +| `--fix` | Try to fix errors | + +## prisma + +Run Prisma CLI within the context of a Redwood project. + +``` +yarn redwood prisma +``` + +Redwood's `prisma` command is a lightweight wrapper around the Prisma CLI. It's the primary way you interact with your database. + +> **What do you mean it's a lightweight wrapper?** +> +> By lightweight wrapper, we mean that we're handling some flags under the hood for you. +> You can use the Prisma CLI directly (`yarn prisma`), but letting Redwood act as a proxy (`yarn redwood prisma`) saves you a lot of keystrokes. +> For example, Redwood adds the `--schema=api/db/schema.prisma` flags automatically. +> +> If you want to know exactly what `yarn redwood prisma <command>` runs, which flags it's passing, etc., it's right at the top: +> +> ```sh{3} +> $ yarn redwood prisma migrate dev +> yarn run v1.22.10 +> $ ~/redwood-app/node_modules/.bin/redwood prisma migrate dev +> Running prisma cli: +> yarn prisma migrate dev --schema "~/redwood-app/api/db/schema.prisma" +> ... +> ``` + +Since `yarn redwood prisma` is just an entry point into all the database commands that the Prisma CLI has to offer, we won't try to provide an exhaustive reference of everything you can do with it here. Instead what we'll do is focus on some of the most common commands; those that you'll be running on a regular basis, and how they fit into Redwood's workflows. + +For the complete list of commands, see the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference). It's the authority. + +Along with the CLI reference, bookmark Prisma's [Migration Flows](https://www.prisma.io/docs/concepts/components/prisma-migrate/prisma-migrate-flows) doc—it'll prove to be an invaluable resource for understanding `yarn redwood prisma migrate`. + +| Command | Description | +| :------------------ | :----------------------------------------------------------- | +| `db <command>` | Manage your database schema and lifecycle during development | +| `generate` | Generate artifacts (e.g. Prisma Client) | +| `migrate <command>` | Update the database schema with migrations | + +### prisma db + +Manage your database schema and lifecycle during development. 
+ +``` +yarn redwood prisma db <command> +``` + +The `prisma db` namespace contains commands that operate directly against the database. + +#### prisma db pull + +Pull the schema from an existing database, updating the Prisma schema. + +> 👉 Quick link to the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference#db-pull). + +``` +yarn redwood prisma db pull +``` + +This command, formerly `introspect`, connects to your database and adds Prisma models to your Prisma schema that reflect the current database schema. + +> Warning: The command will Overwrite the current schema.prisma file with the new schema. Any manual changes or customization will be lost. Be sure to back up your current schema.prisma file before running `db pull` if it contains important modifications. + +#### prisma db push + +Push the state from your Prisma schema to your database. + +> 👉 Quick link to the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference#db-push). + +``` +yarn redwood prisma db push +``` + +This is your go-to command for prototyping changes to your Prisma schema (`schema.prisma`). +Prior to to `yarn redwood prisma db push`, there wasn't a great way to try out changes to your Prisma schema without creating a migration. +This command fills the void by "pushing" your `schema.prisma` file to your database without creating a migration. You don't even have to run `yarn redwood prisma generate` afterward—it's all taken care of for you, making it ideal for iterative development. + +#### prisma db seed + +Seed your database. + +> 👉 Quick link to the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference#db-seed-preview). + +``` +yarn redwood prisma db seed +``` + +This command seeds your database by running your project's `seed.js|ts` file which you can find in your `scripts` directory. + +Prisma's got a great [seeding guide](https://www.prisma.io/docs/guides/prisma-guides/seed-database) that covers both the concepts and the nuts and bolts. + +> **Important:** Prisma Migrate also triggers seeding in the following scenarios: +> +> - you manually run the `yarn redwood prisma migrate reset` command +> - the database is reset interactively in the context of using `yarn redwood prisma migrate dev`—for example, as a result of migration history conflicts or database schema drift +> +> If you want to use `yarn redwood prisma migrate dev` or `yarn redwood prisma migrate reset` without seeding, you can pass the `--skip-seed` flag. + +While having a great seed might not be all that important at the start, as soon as you start collaborating with others, it becomes vital. + +**How does seeding actually work?** + +If you look at your project's `package.json` file, you'll notice a `prisma` section: + +```json + "prisma": { + "seed": "yarn rw exec seed" + }, +``` + +Prisma runs any command found in the `seed` setting when seeding via `yarn rw prisma db seed` or `yarn rw prisma migrate reset`. +Here we're using the Redwood [`exec` cli command](#exec) that runs a script. + +If you wanted to seed your database using a different method (like `psql` and an `.sql` script), you can do so by changing the "seed" script command. 
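+
+For example, a hypothetical setup that seeds from a raw SQL file instead (assuming `psql` is installed and the SQL lives in `./scripts/seed.sql`) might swap the entry out like so:
+
+```json
+  "prisma": {
+    "seed": "psql $DATABASE_URL -f ./scripts/seed.sql"
+  },
+```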
+ +**More About Seeding** + +In addition, you can [code along with Ryan Chenkie](https://www.youtube.com/watch?v=2LwTUIqjbPo), and learn how libraries like [faker](https://www.npmjs.com/package/faker) can help you create a large, realistic database fast, especially in tandem with Prisma's [createMany](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#createmany). + +<!-- ### generate --> + +<!-- Generate artifacts (e.g. Prisma Client). --> + +<!-- > 👉 Quick link to the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference#generate). --> + +<!-- ``` --> +<!-- yarn redwood prisma generate --> +<!-- ``` --> + +**Log Formatting** + +If you use the Redwood Logger as part of your seed script, you can pipe the command to the LogFormatter to output prettified logs. + +For example, if your `scripts.seed.js` imports the `logger`: + +```jsx title="scripts/seed.js" +import { db } from 'api/src/lib/db' +import { logger } from 'api/src/lib/logger' + +export default async () => { + try { + const posts = [ + { + title: 'Welcome to the blog!', + body: "I'm baby single- origin coffee kickstarter lo.", + }, + { + title: 'A little more about me', + body: 'Raclette shoreditch before they sold out lyft.', + }, + { + title: 'What is the meaning of life?', + body: 'Meh waistcoat succulents umami asymmetrical, hoodie post-ironic paleo chillwave tote bag.', + }, + ] + + Promise.all( + posts.map(async (post) => { + const newPost = await db.post.create({ + data: { title: post.title, body: post.body }, + }) + + logger.debug({ data: newPost }, 'Added post') + }) + ) + } catch (error) { + logger.error(error) + } +} +``` + +You can pipe the script output to the formatter: + +```bash +yarn rw prisma db seed | yarn rw-log-formatter +``` + +> Note: Just be sure to set `data` attribute, so the formatter recognizes the content. +> For example: `logger.debug({ data: newPost }, 'Added post')` + +### prisma migrate + +Update the database schema with migrations. + +> 👉 Quick link to the [Prisma Concepts](https://www.prisma.io/docs/concepts/components/prisma-migrate). + +``` +yarn redwood prisma migrate <command> +``` + +As a database toolkit, Prisma strives to be as holistic as possible. Prisma Migrate lets you use Prisma schema to make changes to your database declaratively, all while keeping things deterministic and fully customizable by generating the migration steps in a simple, familiar format: SQL. + +Since migrate generates plain SQL files, you can edit those SQL files before applying the migration using `yarn redwood prisma migrate --create-only`. This creates the migration based on the changes in the Prisma schema, but doesn't apply it, giving you the chance to go in and make any modifications you want. [Daniel Norman's tour of Prisma Migrate](https://www.youtube.com/watch?v=0LKhksstrfg) demonstrates this and more to great effect. + +Prisma Migrate has separate commands for applying migrations based on whether you're in dev or in production. The Prisma [Migration flows](https://www.prisma.io/docs/concepts/components/prisma-migrate/prisma-migrate-flows) goes over the difference between these workflows in more detail. + +#### prisma migrate dev + +Create a migration from changes in Prisma schema, apply it to the database, trigger generators (e.g. Prisma Client). + +> 👉 Quick link to the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference#migrate-dev). 
+ +``` +yarn redwood prisma migrate dev +``` + +<!-- #### reset --> + +<!-- Reset your database and apply all migrations, all data will be lost. --> + +<!-- > 👉 Quick link to the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference#migrate-reset). --> + +<!-- ``` --> +<!-- yarn redwood prisma migrate reset --> +<!-- ``` --> + +#### prisma migrate deploy + +Apply pending migrations to update the database schema in production/staging. + +> 👉 Quick link to the [Prisma CLI Reference](https://www.prisma.io/docs/reference/api-reference/command-reference#migrate-deploy). + +``` +yarn redwood prisma migrate deploy +``` + +#### prisma migrate reset + +This command deletes and recreates the database, or performs a "soft reset" by removing all data, tables, indexes, and other artifacts. + +It'll also re-seed your database by automatically running the `db seed` command. See [prisma db seed](#prisma-db-seed). + +> **_Important:_** For use in development environments only + +## record + +> This command is experimental and its behavior may change. + +Commands for working with RedwoodRecord. + +### record init + +Parses `schema.prisma` and caches the datamodel as JSON. Reads relationships between models and adds some configuration in `api/src/models/index.js`. + +``` +yarn rw record init +``` + +## redwood-tools (alias rwt) + +Redwood's companion CLI development tool. You'll be using this if you're contributing to Redwood. See [Contributing](https://github.com/redwoodjs/redwood/blob/main/CONTRIBUTING.md#cli-reference-redwood-tools) in the Redwood repo. + +## setup + +Initialize configuration and integrate third-party libraries effortlessly. + +``` +yarn redwood setup <category> +``` + +| Commands | Description | +| ------------------ | ------------------------------------------------------------------------------------------ | +| `auth` | Set up auth configuration for a provider | +| `cache` | Set up cache configuration for memcached or redis | +| `custom-web-index` | Set up an `index.js` file, so you can customize how Redwood web is mounted in your browser | +| `deploy` | Set up a deployment configuration for a provider | +| `generator` | Copy default Redwood generator templates locally for customization | +| `i18n` | Set up i18n | +| `package` | Peform setup actions by running a third-party npm package | +| `tsconfig` | Add relevant tsconfig so you can start using TypeScript | +| `ui` | Set up a UI design or style library | +| `webpack` | Set up a webpack config file in your project so you can add custom config | + +### setup auth + +Integrate an auth provider. + +``` +yarn redwood setup auth <provider> +``` + +| Arguments & Options | Description | +| :------------------ | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `provider` | Auth provider to configure. Choices are `auth0`, `azureActiveDirectory`, `clerk`, `dbAuth`, `ethereum`, `firebase`, `goTrue`, `magicLink`, `netlify`, `nhost`, and `supabase` | +| `--force, -f` | Overwrite existing configuration | + +#### Usage + +See [Authentication](authentication.md). + +### setup cache + +This command creates a setup file in `api/src/lib/cache.{ts|js}` for connecting to a Memcached or Redis server and allows caching in services. See the [**Caching** section of the Services docs](/docs/services#caching) for usage. 
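+
+As a rough sketch of what that enables, a service can wrap an expensive query in the generated cache helper. The exact helper names and signature come from the `cache.{ts|js}` file this command creates (check the Services docs linked above); the key format and the `expires` value here are assumptions for illustration:
+
+```jsx title="./api/src/services/posts/posts.js"
+import { cache } from 'src/lib/cache'
+import { db } from 'src/lib/db'
+
+export const post = ({ id }) => {
+  // Cache each post under its own key; entries expire after an hour
+  return cache(`post-${id}`, () => db.post.findUnique({ where: { id } }), {
+    expires: 3600,
+  })
+}
+```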
+ +``` +yarn redwood setup cache <client> +``` + +| Arguments & Options | Description | +| :------------------ | :------------------------------------------------------ | +| `client` | Name of the client to configure, `memcached` or `redis` | +| `--force, -f` | Overwrite existing files | + +### setup custom-web-index + +:::warning This command only applies to projects using Webpack + +As of v6, all Redwood projects use Vite by default. +When switching projects to Vite, we made the decision to add the the entry file, `web/src/entry.client.{jsx,tsx}`, back to projects. + +::: + +Redwood automatically mounts your `<App />` to the DOM, but if you want to customize how that happens, you can use this setup command to generate an `index.js` file in `web/src`. + +``` +yarn redwood setup custom-web-index +``` + +| Arguments & Options | Description | +| :------------------ | :----------------------- | +| `--force, -f` | Overwrite existing files | + +### setup generator + +Copies a given generator's template files to your local app for customization. The next time you generate that type again, it will use your custom template instead of Redwood's default. + +``` +yarn rw setup generator <name> +``` + +| Arguments & Options | Description | +| :------------------ | :------------------------------------------------------------ | +| `name` | Name of the generator template(s) to copy (see help for list) | +| `--force, -f` | Overwrite existing copied template files | + +#### Usage + +If you wanted to customize the page generator template, run the command: + +``` +yarn rw setup generator page +``` + +And then check `web/generators/page` for the page, storybook and test template files. You don't need to keep all of these templates—you could customize just `page.tsx.template` and delete the others and they would still be generated, but using the default Redwood templates. + +The only exception to this rule is the scaffold templates. You'll get four directories, `assets`, `components`, `layouts` and `pages`. If you want to customize any one of the templates in those directories, you will need to keep all the other files inside of that same directory, even if you make no changes besides the one you care about. (This is due to the way the scaffold looks up its template files.) For example, if you wanted to customize only the index page of the scaffold (the one that lists all available records in the database) you would edit `web/generators/scaffold/pages/NamesPage.tsx.template` and keep the other pages in that directory. You _could_ delete the other three directories (`assets`, `components`, `layouts`) if you don't need to customize them. + +**Name Variants** + +Your template will receive the provided `name` in a number of different variations. + +For example, given the name `fooBar` your template will receive the following _variables_ with the given _values_ + +| Variable | Value | +| :--------------------- | :--------- | +| `pascalName` | `FooBar` | +| `camelName` | `fooBar` | +| `singularPascalName` | `FooBar` | +| `pluralPascalName` | `FooBars` | +| `singularCamelName` | `fooBar` | +| `pluralCamelName` | `fooBars` | +| `singularParamName` | `foo-bar` | +| `pluralParamName` | `foo-bars` | +| `singularConstantName` | `FOO_BAR` | +| `pluralConstantName` | `FOO_BARS` | + +#### Example + +Copying the cell generator templates: + +```bash +~/redwood-app$ yarn rw setup generator cell +yarn run v1.22.4 +$ /redwood-app/node_modules/.bin/rw setup generator cell + ✔ Copying generator templates... 
+ ✔ Wrote templates to /web/generators/cell +✨ Done in 2.33s. +``` + +### setup deploy (config) + +Set up a deployment configuration. + +``` +yarn redwood setup deploy <provider> +``` + +| Arguments & Options | Description | +| :------------------ | :---------------------------------------------------------------------------------------------------- | +| `provider` | Deploy provider to configure. Choices are `baremetal`, `coherence`, `edgio`, `flightcontrol`, `netlify`, `render`, `vercel`, or `aws-serverless [deprecated]`, | +| `--database, -d` | Database deployment for Render only [choices: "none", "postgresql", "sqlite"] [default: "postgresql"] | +| `--force, -f` | Overwrite existing configuration [default: false] | + +#### setup deploy netlify + +When configuring Netlify deployment, the `setup deploy netlify` command generates a `netlify.toml` [configuration file](https://docs.netlify.com/configure-builds/file-based-configuration/) with the defaults needed to build and deploy a RedwoodJS site on Netlify. + +The `netlify.toml` file is a configuration file that specifies how Netlify builds and deploys your site — including redirects, branch and context-specific settings, and more. + +This configuration file also defines the settings needed for [Netlify Dev](https://docs.netlify.com/configure-builds/file-based-configuration/#netlify-dev) to detect that your site uses the RedwoodJS framework. Netlify Dev serves your RedwoodJS app as if it runs on the Netlify platform and can serve functions, handle Netlify [headers](https://docs.netlify.com/configure-builds/file-based-configuration/#headers) and [redirects](https://docs.netlify.com/configure-builds/file-based-configuration/#redirects). + +Netlify Dev can also create a tunnel from your local development server that allows you to share and collaborate with others using `netlify dev --live`. + +``` +// See: netlify.toml +// ... +[dev] + # To use [Netlify Dev](https://www.netlify.com/products/dev/), + # install netlify-cli from https://docs.netlify.com/cli/get-started/#installation + # and then use netlify link https://docs.netlify.com/cli/get-started/#link-and-unlink-sites + # to connect your local project to a site already on Netlify + # then run netlify dev and our app will be accessible on the port specified below + framework = "redwoodjs" + # Set targetPort to the [web] side port as defined in redwood.toml + targetPort = 8910 + # Point your browser to this port to access your RedwoodJS app + port = 8888 +``` + +In order to use [Netlify Dev](https://www.netlify.com/products/dev/) you need to: + +- install the latest [netlify-cli](https://docs.netlify.com/cli/get-started/#installation) +- use [netlify link](https://docs.netlify.com/cli/get-started/#link-and-unlink-sites) to connect to your Netlify site +- ensure that the `targetPort` matches the [web] side port in `redwood.toml` +- run `netlify dev` and your site will be served on the specified `port` (e.g., 8888) +- if you wish to share your local server with others, you can run `netlify dev --live` + +> Note: To detect the RedwoodJS framework, please use netlify-cli v3.34.0 or greater. + +### setup mailer + +This command adds the necessary packages and files to get started using the RedwoodJS mailer. By default it also creates an example mail template which can be skipped with the `--skip-examples` flag. 
+ +``` +yarn redwood setup mailer +``` + +| Arguments & Options | Description | +| :---------------------- | :----------------------------- | +| `--force, -f` | Overwrite existing files | +| `--skip-examples` | Do not include example content, such as a React email template | + +### setup package + +This command takes a published npm package that you specify, performs some compatibility checks, and then executes its bin script. This allows you to use third-party packages that can provide you with an easy-to-use setup command for the particular functionality they provide. + +This command behaves similarly to `yarn dlx` but will attempt to confirm compatibility between the package you are attempting to run and the current version of Redwood you are running. You can bypass this check by passing the `--force` flag if you feel you understand any potential compatibility issues. + +``` +yarn redwood setup package <npm-package> +``` + +| Arguments & Options | Description | +| :------------------ | :----------------------- | +| `--force, -f` | Forgo compatibility checks | + +#### Usage + +Run the made up `@redwoodjs/setup-example` package: +```bash +~/redwood-app$ yarn rw setup package @redwoodjs/setup-example +``` + +Run the same package but using a particular npm tag and avoiding any compatibility checks: +```bash +~/redwood-app$ yarn rw setup package @redwoodjs/setup-example@beta --force +``` + +**Compatibility Checks** + +We perform a simple compatibility check in an attempt to make you aware of potential compatibility issues with setup packages you might wish to run. This works by examining the version of `@redwoodjs/core` you are using within your root `package.json`. We compare this value with a compatibility range the npm package specifies in the `engines.redwoodjs` field of its own `package.json`. If the version of `@redwoodjs/core` you are using falls outside of the compatibility range specified by the package you are attempting to run, we will warn you and ask you to confirm that you wish to continue. + +It's the author of the npm package's responsibility to specify the correct compatibility range, so **you should always research the packages you use with this command**. Especially since they will be executing code on your machine! + +### setup graphql + +This command creates the necessary files to support GraphQL features like fragments and trusted documents. + +#### Usage + +Run `yarn rw setup graphql <feature>` + +#### setup graphql fragments + +This command creates the necessary configuration to start using [GraphQL Fragments](./graphql/fragments.md). + +``` +yarn redwood setup graphql fragments +``` + +| Arguments & Options | Description | +| :------------------ | :--------------------------------------- | +| `--force, -f` | Overwrite existing files and skip checks | + +#### Usage + +Run `yarn rw setup graphql fragments` + +#### Example + +```bash +~/redwood-app$ yarn rw setup graphql fragments +✔ Update Redwood Project Configuration to enable GraphQL Fragments +✔ Generate possibleTypes.ts +✔ Import possibleTypes in App.tsx +✔ Add possibleTypes to the GraphQL cache config +``` + +#### setup graphql trusted-documents + +This command creates the necessary configuration to start using [GraphQL Trusted Documents](./graphql/trusted-documents.md). 
+ + +``` +yarn redwood setup graphql trusted-documents +``` + +#### Usage + +Run `yarn rw setup graphql trusted-documents` + +#### Example + +```bash +~/redwood-app$ yarn rw setup graphql trusted-documents +✔ Update Redwood Project Configuration to enable GraphQL Trusted Documents ... +✔ Generating Trusted Documents store ... +✔ Configuring the GraphQL Handler to use a Trusted Documents store ... +``` + + +If you have not setup the RedwoodJS server file, it will be setup: + +```bash +✔ Adding the experimental server file... +✔ Adding config to redwood.toml... +✔ Adding required api packages... +``` + + +### setup realtime + +This command creates the necessary files, installs the required packages, and provides examples to setup RedwoodJS Realtime from GraphQL live queries and subscriptions. See the Realtime docs for more information. + +``` +yarn redwood setup realtime +``` + +| Arguments & Options | Description | +| :------------------ | :----------------------- | +| `-e, --includeExamples, --examples` | Include examples of how to implement liveQueries and subscriptions. Default: true. | +| `--force, -f` | Forgo compatibility checks | + +:::note + +If the RedwoodJS Server is not setup, it will be installed as well. + +::: + +#### Usage + +Run `yarn rw setup realtime` + +#### Example + +```bash +~/redwood-app$ yarn rw setup realtime +✔ Checking for realtime environment prerequisites ... +✔ Adding required api packages... +✔ Adding the realtime api lib ... +✔ Adding Countdown example subscription ... +✔ Adding NewMessage example subscription ... +✔ Adding Auctions example live query ... +✔ Generating types ... +``` + + +If you have not setup the RedwoodJS server file, it will be setup: + +```bash +✔ Adding the experimental server file... +✔ Adding config to redwood.toml... +✔ Adding required api packages... +``` + +### setup tsconfig + +Add a `tsconfig.json` to both the web and api sides so you can start using [TypeScript](typescript/index). + +``` +yarn redwood setup tsconfig +``` + +| Arguments & Options | Description | +| :------------------ | :----------------------- | +| `--force, -f` | Overwrite existing files | + + + +### setup ui + +Set up a UI design or style library. Right now the choices are [TailwindCSS](https://tailwindcss.com/), [Chakra UI](https://chakra-ui.com/), and [Mantine UI](https://ui.mantine.dev/). + +``` +yarn rw setup ui <library> +``` + +| Arguments & Options | Description | +| :------------------ | :-------------------------------------------------------------------------------------- | +| `library` | Library to configure. Choices are `chakra-ui`, `tailwindcss`, and `mantine` | +| `--force, -f` | Overwrite existing configuration | + +## storybook + +Starts Storybook locally + +```bash +yarn redwood storybook +``` + +[Storybook](https://storybook.js.org/docs/react/get-started/introduction) is a tool for UI development that allows you to develop your components in isolation, away from all the conflated cruft of your real app. + +> "Props in, views out! Make it simple to reason about." + +RedwoodJS supports Storybook by creating stories when generating cells, components, layouts and pages. You can then use these to describe how to render that UI component with representative data. + +| Arguments & Options | Description | +| :------------------ | :------------------------------------------------------------------------------------------------- | +| `--open` | Open Storybook in your browser on start [default: true]. 
Pass `--no-open` to disable this behavior | +| `--build` | Build Storybook | +| `--port` | Which port to run Storybook on [default: 7910] | + +## test + +Run Jest tests for api and web. + +```bash +yarn redwood test [side..] +``` + +| Arguments & Options | Description | +| ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `sides or filter` | Which side(s) to test, and/or a regular expression to match against your test files to filter by | +| `--help` | Show help | +| `--version` | Show version number | +| `--watch` | Run tests related to changed files based on hg/git (uncommitted files). Specify the name or path to a file to focus on a specific set of tests [default: true] | +| `--watchAll` | Run all tests | +| `--collectCoverage` | Show test coverage summary and output info to `coverage` directory in project root. See this directory for an .html coverage report | +| `--clearCache` | Delete the Jest cache directory and exit without running tests | +| `--db-push` | Syncs the test database with your Prisma schema without requiring a migration. It creates a test database if it doesn't already exist [default: true]. This flag is ignored if your project doesn't have an `api` side. [👉 More details](#prisma-db-push). | + +> **Note** all other flags are passed onto the jest cli. So for example if you wanted to update your snapshots you can pass the `-u` flag + +## type-check (alias tsc or tc) + +Runs a TypeScript compiler check on both the api and the web sides. + +```bash +yarn redwood type-check [side] +``` + +| Arguments & Options | Description | +| ------------------- | ------------------------------------------------------------------------------ | +| `side` | Which side(s) to run. Choices are `api` and `web`. Defaults to `api` and `web` | + +#### Usage + +See [Running Type Checks](typescript/introduction.md#running-type-checks). + +## serve + +Runs a server that serves both the api and the web sides. + +```bash +yarn redwood serve [side] +``` + +> You should run `yarn rw build` before running this command to make sure all the static assets that will be served have been built. + +`yarn rw serve` is useful for debugging locally or for self-hosting—deploying a single server into a serverful environment. Since both the api and the web sides run in the same server, CORS isn't a problem. + +| Arguments & Options | Description | +| ------------------- | ------------------------------------------------------------------------------ | +| `side` | Which side(s) to run. Choices are `api` and `web`. Defaults to `api` and `web` | +| `--port` | What port should the server run on [default: 8911] | +| `--socket` | The socket the server should run. This takes precedence over port | + +### serve api + +Runs a server that only serves the api side. + +``` +yarn rw serve api +``` + +This command uses `apiUrl` in your `redwood.toml`. Use this command if you want to run just the api side on a server (e.g. running on Render). + +| Arguments & Options | Description | +| ------------------- | ----------------------------------------------------------------- | +| `--port` | What port should the server run on [default: 8911] | +| `--socket` | The socket the server should run. 
This takes precedence over port | +| `--apiRootPath` | The root path where your api functions are served | + +For the full list of Server Configuration settings, see [this documentation](app-configuration-redwood-toml.md#api). +If you want to format your log output, you can pipe the command to the Redwood LogFormatter: + +``` +yarn rw serve api | yarn rw-log-formatter +``` + +### serve web + +Runs a server that only serves the web side. + +``` +yarn rw serve web +``` + +This command serves the contents in `web/dist`. Use this command if you're debugging (e.g. great for debugging prerender) or if you want to run your api and web sides on separate servers, which is often considered a best practice for scalability (since your api side likely has much higher scaling requirements). + +> **But shouldn't I use nginx and/or equivalent technology to serve static files?** +> +> Probably, but it can be a challenge to setup when you just want something running quickly! + +| Arguments & Options | Description | +| ------------------- | ------------------------------------------------------------------------------------- | +| `--port` | What port should the server run on [default: 8911] | +| `--socket` | The socket the server should run. This takes precedence over port | +| `--apiHost` | Forwards requests from the `apiUrl` (defined in `redwood.toml`) to the specified host | + +If you want to format your log output, you can pipe the command to the Redwood LogFormatter: + +``` +yarn rw serve web | yarn rw-log-formatter +``` + +## upgrade + +Upgrade all `@redwoodjs` packages via an interactive CLI. + +```bash +yarn redwood upgrade +``` + +This command does all the heavy-lifting of upgrading to a new release for you. + +Besides upgrading to a new stable release, you can use this command to upgrade to either of our unstable releases, `canary` and `rc`, or you can upgrade to a specific release version. + +A canary release is published to npm every time a PR is merged to the `main` branch, and when we're getting close to a new release, we publish release candidates. + +| Option | Description | +| :-------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| `--dry-run, -d` | Check for outdated packages without upgrading | +| `--tag, -t` | Choices are "rc", "canary", "latest", "next", "experimental", or a specific version (e.g. "0.19.3"). WARNING: Unstable releases in the case of "canary", "rc", "next", and "experimental". And "canary" releases include breaking changes often requiring codemods if upgrading a project. | + +#### Example + +Upgrade to the most recent canary: + +```bash +yarn redwood upgrade -t canary +``` + +Upgrade to a specific version: + +```bash +yarn redwood upgrade -t 0.19.3 +``` + +## Background checks + +The CLI can check for things in the background, like new versions of the framework, while you dev. + +Right now it can only check for new versions. +If you'd like it to do so, set `notifications.versionUpdates` in the `redwood.toml` file to include an array of the tags you're interested in hearing about. +(The former has priority.) + +By default, the CLI won't check for upgrades—you have to opt into it. + +You'll see this notification once a day at most. And the CLI will check for it once a day at most. So, nothing heavy-handed going on here. 
diff --git a/docs/versioned_docs/version-7.0/connection-pooling.md b/docs/versioned_docs/version-7.0/connection-pooling.md new file mode 100644 index 000000000000..750c012b3076 --- /dev/null +++ b/docs/versioned_docs/version-7.0/connection-pooling.md @@ -0,0 +1,105 @@ +--- +description: Scale your serverless functions +--- + +# Connection Pooling + +> ⚠ **Work in Progress** ⚠️ +> +> There's more to document here. In the meantime, you can check our [community forum](https://community.redwoodjs.com/search?q=connection%20pooling) for answers. +> +> Want to contribute? Redwood welcomes contributions and loves helping people become contributors. +> You can edit this doc [here](https://github.com/redwoodjs/redwoodjs.com/blob/main/docs/connectionPooling.md). +> If you have any questions, just ask for help! We're active on the [forums](https://community.redwoodjs.com/c/contributing/9) and on [discord](https://discord.com/channels/679514959968993311/747258086569541703). + +Production Redwood apps should enable connection pooling in order to properly scale with your Serverless functions. + +## Prisma Data Proxy + +The [Prisma Data Proxy](https://www.prisma.io/docs/data-platform/data-proxy) provides database connection management and pooling for Redwood apps using Prisma. It supports MySQL and Postgres databases in either the U.S. or EU regions. + +To set up a Prisma Data Proxy, sign up for the [Prisma Data Platform](https://www.prisma.io/data-platform) for free. In your onboarding workflow, plug in the connection URL for your database and choose your region. This will generate a connection string for your app. Then follow the instructions in [Prisma's documentation](https://www.prisma.io/docs/concepts/data-platform/data-proxy). + +> Note that the example uses npm. Rather than using npm, you can access the Prisma CLI using `yarn redwood prisma` inside a Redwood app. + +## Prisma & PgBouncer + +PgBouncer holds a connection pool to the database and proxies incoming client connections by sitting between Prisma Client and the database. This reduces the number of processes a database has to handle at any given time. PgBouncer passes on a limited number of connections to the database and queues additional connections for delivery when space becomes available. + + +To use Prisma Client with PgBouncer from a serverless function, add the `?pgbouncer=true` flag to the PostgreSQL connection URL: + +``` +postgresql://USER:PASSWORD@HOST:PORT/DATABASE?pgbouncer=true +``` + +Typically, your PgBouncer port will be 6543 which is different from the Postgres default of 5432. + +> Note that since Prisma Migrate uses database transactions to check out the current state of the database and the migrations table, if you attempt to run Prisma Migrate commands in any environment that uses PgBouncer for connection pooling, you might see an error. +> +> To work around this issue, you must connect directly to the database rather than going through PgBouncer when migrating. + +For more information on Prisma and PgBouncer, please refer to Prisma's Guide on [Configuring Prisma Client with PgBouncer](https://www.prisma.io/docs/guides/performance-and-optimization/connection-management/configure-pg-bouncer). + +## Supabase + +For Postgres running on [Supabase](https://supabase.io) see: [PgBouncer is now available in Supabase](https://supabase.io/blog/2021/04/02/supabase-pgbouncer#using-connection-pooling-in-supabase). + +All new Supabase projects include connection pooling using [PgBouncer](https://www.pgbouncer.org/). 
+ +We recommend that you connect to your Supabase Postgres instance using SSL which you can do by setting `sslmode` to `require` on the connection string: + +``` +// not pooled typically uses port 5432 +postgresql://postgres:mydb.supabase.co:5432/postgres?sslmode=require +// pooled typically uses port 6543 +postgresql://postgres:mydb.supabase.co:6543/postgres?sslmode=require&pgbouncer=true +``` + +## Heroku +For Postgres, see [Postgres Connection Pooling](https://devcenter.heroku.com/articles/postgres-connection-pooling). + +Heroku does not officially support MySQL. + + +## Digital Ocean +For Postgres, see [How to Manage Connection Pools](https://www.digitalocean.com/docs/databases/postgresql/how-to/manage-connection-pools) + +To run migrations through a connection pool, you're required to append connection parameters to your `DATABASE_URL`. Prisma needs to know to use pgbouncer (which is part of Digital Ocean's connection pool). If omitted, you may receive the following error: + +``` +Error: Migration engine error: +db error: ERROR: prepared statement "s0" already exists +``` + +To resolve this, use the following structure in your `DATABASE_URL`: + +``` +<YOUR_CONNECTION_POOL_URL>:25061/defaultdb?connection_limit=3&sslmode=require&pgbouncer=true&connect_timeout=10&pool_timeout=30 +``` +Here's a couple more things to be aware of: +- When using a Digital Ocean connection pool, you'll have multiple ports available. Typically the direct connection (without connection pooling) is on port `25060` and the connection through pgbouncer is served through port `25061`. Make sure you connect to your connection pool on port `25061` +- Adjust the `connection_limit`. Clusters provide 25 connections per 1 GB of RAM. Three connections per cluster are reserved for maintenance, and all remaining connections can be allocated to connection pools +- Both `pgbouncer=true` and `pool_timeout=30` are required to deploy successfully through your connection pool + +Connection Pooling for MySQL is not yet supported. + +## AWS +Use [Amazon RDS Proxy](https://aws.amazon.com/rds/proxy) for MySQL or PostgreSQL. + +From the [AWS Docs](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/rds-proxy.html#rds-proxy.limitations): +>Your RDS Proxy must be in the same VPC as the database. The proxy can't be publicly accessible. + +Because of this limitation, with out-of-the-box configuration, you can only use RDS Proxy if you're deploying your Lambda Functions to the same AWS account. Alternatively, you can use RDS directly, but you might require larger instances to handle your production traffic and the number of concurrent connections. + + +## Why Connection Pooling? + +Relational databases have a maximum number of concurrent client connections. + +* Postgres allows 100 by default +* MySQL allows 151 by default + +In a traditional server environment, you would need a large amount of traffic (and therefore web servers) to exhaust these connections, since each web server instance typically leverages a single connection. + +In a Serverless environment, each function connects directly to the database, which can exhaust limits quickly. To prevent connection errors, you should add a connection pooling service in front of your database. Think of it as a load balancer. 
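+
+To make the pooled-versus-direct distinction above concrete, here is a minimal sketch of how the two connection strings might be wired into Prisma. It assumes a recent Prisma version that supports the `directUrl` datasource field and hypothetical `DATABASE_URL`/`DIRECT_URL` environment variables; adjust the names, ports, and parameters for your provider:
+
+```
+datasource db {
+  provider  = "postgresql"
+  // Pooled connection (through PgBouncer or your provider's pooler) used by Prisma Client at runtime
+  url       = env("DATABASE_URL")   // e.g. postgresql://USER:PASSWORD@HOST:6543/DATABASE?pgbouncer=true
+  // Direct connection used by Prisma Migrate so migrations bypass the pooler
+  directUrl = env("DIRECT_URL")     // e.g. postgresql://USER:PASSWORD@HOST:5432/DATABASE
+}
+```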
diff --git a/docs/versioned_docs/version-7.0/contributing-overview.md b/docs/versioned_docs/version-7.0/contributing-overview.md new file mode 100644 index 000000000000..623d70ae02a3 --- /dev/null +++ b/docs/versioned_docs/version-7.0/contributing-overview.md @@ -0,0 +1,168 @@ +--- +title: Contributing +description: There's several ways to contribute to Redwood +slug: contributing +--- + +# Contributing: Overview and Orientation + +Love Redwood and want to get involved? You’re in the right place and in good company! As of this writing, there are more than [250 contributors](https://github.com/redwoodjs/redwood/blob/main/README.md#contributors) who have helped make Redwood awesome by contributing code and documentation. This doesn't include all those who participate in the vibrant, helpful, and encouraging Forums and Discord, which are both great places to get started if you have any questions. + +There are several ways you can contribute to Redwood: + +- join the [community Forums](https://community.redwoodjs.com/) and [Discord server](https://discord.gg/jjSYEQd) — encourage and help others 🙌 +- [triage issues on the repo](https://github.com/redwoodjs/redwood/issues) and [review PRs](https://github.com/redwoodjs/redwood/pulls) 🩺 +- write and edit [docs](#contributing-docs) ✍️ +- and of course, write code! 👩‍💻 + +_Before interacting with the Redwood community, please read and understand our [Code of Conduct](https://github.com/redwoodjs/redwood/blob/main/CODE_OF_CONDUCT.md#contributor-covenant-code-of-conduct)._ + +> ⚡️ **Quick Links** +> +> There are several contributing docs and references, each covering specific topics: +> +> 1. 🧭 **Overview and Orientation** (👈 you are here) +> 2. 📓 [Reference: Contributing to the Framework Packages](https://github.com/redwoodjs/redwood/blob/main/CONTRIBUTING.md) +> 3. 🪜 [Step-by-step Walkthrough](contributing-walkthrough.md) (including Video Recording) +> 4. 📈 [Current Project Status](https://github.com/orgs/redwoodjs/projects/11) +> 5. 🤔 What should I work on? +> - [Good First Issue](https://redwoodjs.com/good-first-issue) +> - [Discovery Process and Open Issues](#what-should-i-work-on) + +## The Characteristics of a Contributor +More than committing code, contributing is about human collaboration and relationship. Our community mantra is **“By helping each other be successful with Redwood, we make the Redwood project successful.”** We have a specific vision for the effect this project and community will have on you — it should give you superpowers to build+create, progress in skills, and help advance your career. + +So who do you need to become to achieve this? Specifically, what characteristics, skills, and capabilities will you need to cultivate through practice? Here are our suggestions: +- Empathy +- Gratitude +- Generosity + +All of these are applicable in relation to both others and yourself. The goal of putting them into practice is to create trust that will be a catalyst for risk-taking (another word to describe this process is “learning”!). These are the ingredients necessary for productive, positive collaboration. + +And you thought all this was just about opening a PR 🤣 Yes, it’s a super rewarding experience. But that’s just the beginning! + +## What should I work on? +Even if you know the mechanics, it’s hard to get started without a starting place. Our best advice is this — dive into the Redwood Tutorial, read the docs, and build your own experiment with Redwood. 
Along the way, you’ll find typos, out-of-date (or missing) documentation, code that could work better, or even opportunities for improving and adding features. You’ll be engaging in the Forums and Chat and developing a feel for priorities and needs. This way, you’ll naturally follow your own interests and sooner than later intersect “things you’re interested in” + “ways to help improve Redwood”. + +There are other more direct ways to get started as well, which are outlined below. + +### Project Boards and GitHub Issues + +The Redwood Core Team is working publicly — progress is updated daily on the [Release Project Board](https://github.com/orgs/redwoodjs/projects/11). + +Eventually, all this leads you back to Redwood’s GitHub Issues page. Here you’ll find open items that need help, which are organized by labels. There are four labels helpful for contributing: +1. [Good First Issue](https://github.com/redwoodjs/redwood/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22): these items are more likely to be an accessible entry point to the Framework. It’s less about skill level and more about focused scope. +2. [Help Wanted](https://github.com/redwoodjs/redwood/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22): these items especially need contribution help from the community. +3. [Bugs 🐛](https://github.com/redwoodjs/redwood/issues?q=is%3Aissue+is%3Aopen+label%3Abug%2Fconfirmed): last but not least, we always need help with bugs. Some are technically less challenging than others. Sometimes the best way you can help is to attempt to reproduce the bug and confirm whether or not it’s still an issue. + +### Create a New Issue +Anyone can create a new Issue. If you’re not sure that your feature or idea is something to work on, start the discussion with an Issue. Describe the idea and problem + solution as clearly as possible, including examples or pseudo code if applicable. It’s also very helpful to `@` mention a maintainer or Core Team member that shares the area of interest. + +Just know that there’s a lot of Issues that shuffle every day. If no one replies, it’s just because people are busy. Reach out in the Forums, Chat, or comment in the Issue. We intend to reply to every Issue that’s opened. If yours doesn’t have a reply, then give us a nudge! + +Lastly, it can often be helpful to start with brief discussion in the community Chat or Forums. Sometimes that’s the quickest way to get feedback and a sense of priority before opening an Issue. + +## Contributing Code + +Redwood's composed of many packages that are designed to work together. Some of these packages are designed to be used outside Redwood too! + +Before you start contributing, you'll want to set up your local development environment. The Redwood repo's top-level [contributing guide](https://github.com/redwoodjs/redwood/blob/main/CONTRIBUTING.md#local-development) walks you through this. Make sure to give it an initial read. + +For details on contributing to a specific package, see the package's README (links provided in the table below). Each README has a section named Roadmap. If you want to get involved but don't quite know how, the Roadmap's a good place to start. See anything that interests you? Go for it! And be sure to let us know—you don't have to have a finished product before opening an issue or pull request. In fact, we're big fans of [Readme Driven Development](https://tom.preston-werner.com/2010/08/23/readme-driven-development.html). + +What you want to do not on the roadmap? Well, still go for it! 
We love spikes and proof-of-concepts. And if you have a question, just ask! + +### RedwoodJS Framework Packages +|Package|Description| +|:-|:-| +|[`@redwoodjs/api-server`](https://github.com/redwoodjs/redwood/blob/main/packages/api-server/README.md)|Run a Redwood app using Fastify server (alternative to serverless API)| +|[`@redwoodjs/api`](https://github.com/redwoodjs/redwood/blob/main/packages/api/README.md)|Infrastructure components for your applications UI including logging, webhooks, authentication decoders and parsers, as well as tools to test custom serverless functions and webhooks| +|[`@redwoodjs/auth`](https://github.com/redwoodjs/redwood/blob/main/packages/auth/README.md#contributing)|A lightweight wrapper around popular SPA authentication libraries| +|[`@redwoodjs/cli`](https://github.com/redwoodjs/redwood/blob/main/packages/cli/README.md)|All the commands for Redwood's built-in CLI| +|[`@redwoodjs/codemods`](https://github.com/redwoodjs/redwood/blob/main/packages/codemods/README.md)|Codemods that automate upgrading a Redwood project| +|[`@redwoodjs/core`](https://github.com/redwoodjs/redwood/blob/main/packages/core/README.md)|Defines babel plugins and config files| +|[`@redwoodjs/create-redwood-app`](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/README.md)|Enables `yarn create redwood-app`—downloads the latest release of Redwood and extracts it into the supplied directory| +|[`@redwoodjs/eslint-config`](https://github.com/redwoodjs/redwood/blob/main/packages/eslint-config/README.md)|Defines Redwood's eslint config| +|[`@redwoodjs/forms`](https://github.com/redwoodjs/redwood/blob/main/packages/forms/README.md)|Provides Form helpers| +|[`@redwoodjs/graphql-server`](https://github.com/redwoodjs/redwood/blob/main/packages/graphql-server/README.md)|Exposes functions to build the GraphQL API, provides services with `context`, and a set of envelop plugins to supercharge your GraphQL API with logging, authentication, error handling, directives and more| +|[`@redwoodjs/internal`](https://github.com/redwoodjs/redwood/blob/main/packages/internal/README.md)|Provides tooling to parse Redwood configs and get a project's paths| +|[`@redwoodjs/prerender`](https://github.com/redwoodjs/redwood/blob/main/packages/prerender/README.md)|Defines functionality for prerendering static content| +|[`@redwoodjs/record`](https://github.com/redwoodjs/redwood/blob/main/packages/record/README.md)|ORM built on top of Prisma. It may be extended in the future to wrap other database access packages| +|[`@redwoodjs/router`](https://github.com/redwoodjs/redwood/blob/main/packages/router/README.md)|The built-in router for Redwood| +|[`@redwoodjs/structure`](https://github.com/redwoodjs/redwood/blob/main/packages/structure/README.md)|Provides a way to build, validate and inspect an object graph that represents a complete Redwood project| +|[`@redwoodjs/telemetry`](https://github.com/redwoodjs/redwood/blob/main/packages/telemetry/README.md)|Provides functionality for anonymous data collection| +|[`@redwoodjs/testing`](https://github.com/redwoodjs/redwood/blob/main/packages/testing/README.md)|Provides helpful defaults when testing a Redwood project's web side| +|[`@redwoodjs/web`](https://github.com/redwoodjs/redwood/blob/main/packages/web/README.md)|Configures a Redwood's app web side: wraps the Apollo Client in `RedwoodApolloProvider`; defines the Cell HOC| + +## Contributing Docs + +First off, thank you for your interest in contributing docs! 
Redwood prides itself on good developer experience, and that includes good documentation. + +Before you get started, there's an implicit doc-distinction that we should make explicit: all the docs on redwoodjs.com are for helping people develop apps using Redwood, while all the docs on the Redwood repo are for helping people contribute to Redwood. + +Although Developing and Contributing docs are in different places, they most definitely should be linked and referenced as needed. For example, it's appropriate to have a "Contributing" doc on redwoodjs.com that's context-appropriate, but it should link to the Framework's [CONTRIBUTING.md](https://github.com/redwoodjs/redwood/blob/main/CONTRIBUTING.md) (the way this doc does). + +### How Redwood Thinks about Docs + +Before we get into the how-to, a little explanation. When thinking about docs, we find [divio's documentation system](https://documentation.divio.com/) really useful. It's not necessary that a doc always have all four of the dimensions listed, but if you find yourself stuck, you can ask yourself questions like "Should I be explaining? Am I explaining too much? Too little?" to reorient yourself while writing. + +### Docs for Developing Redwood Apps + +redwoodjs.com has three kinds of Developing docs: References, How To's, and The Tutorial. +You can find References and How To's within their respective directories on the redwood/redwood repo: [docs/](https://github.com/redwoodjs/redwood/tree/main/docs) and [how-to/](https://github.com/redwoodjs/redwood/tree/main/docs/how-to). + +The Tutorial is a standalone document that serves a specific purpose as an introduction to Redwood, an aspirational roadmap, and an example of developer experience. As such, it's distinct from the categories mentioned, although it's most similar to How To's. + +#### References + +References are explanation-driven how-to content. They're more direct and to-the-point than The Tutorial and How To's. The idea is much more about finding something or getting something done than any kind of learning journey. + +Before you take on a doc, you should read [Forms](forms.md) and [Router](router.md); they have the kind of content you should be striving for. They're comprehensive yet conversational. + +In general, don't be afraid to go into too much detail. We'd rather you err on the side of too much than too little. One tip for finding good content is searching the forum and repo for "prior art"—what are people talking about where this comes up? + +#### How To's + +How To's are tutorial-style content focused on a specific problem-solution. They usually have a beginner in mind (if not, they should indicate that they don't—put 'Advanced' or 'Deep-Dive', etc., in the title or introduction). How To's may include some explanatory text as asides, but they shouldn't be the majority of the content. + +#### Making a Doc Findable + +If you write it, will they read it? We think they will—if they can find it. + +After you've finished writing, step back for a moment and consider the word(s) or phrase(s) people will use to find what you just wrote. For example, let's say you were writing a doc about configuring a Redwood app. If you didn't know much about configuring a Redwood app, a heading (in the nav bar to the left) like "redwood.toml" wouldn't make much sense, even though it _is_ the main configuration file. You'd probably look for "Redwood Config" or "Settings", or type "how to change Redwood App settings" in the "Search the docs" bar up top, or in Google. 
+ +That is to say, the most useful headings aren't always the most literal ones. Indexing is more than just underlining the "important" words in a text—it's identifying and locating the concepts and topics that are the most relevant to our readers, the users of our documentation. + +So, after you've finished writing, reread what you wrote with the intention of making a list of two to three keywords or phrases. Then, try to use each of those in three places, in this order of priority: + +- the left-nav menu title +- the page title or the first right-nav ("On this page") section title +- the introductory paragraph + +### Docs for Contributing to the Redwood Repo + +These docs are in the Framework repo, redwoodjs/redwood, and explain how to contribute to Redwood packages. They're the docs linked to in the table above. + +In general, they should consist of more straightforward explanations, are allowed to be technically heavy, and should be written for a more experienced audience. But as a best practice for collaborative projects, they should still provide a Vision + Roadmap and identify the project-point person(s) (or lead(s)). + +## What makes for a good Pull Request? +In general, we don’t have a formal structure for PRs. Our goal is to make it as efficient as possible for anyone to open a PR. But there are some good practices, which are flexible. Just keep in mind that after opening a PR there’s more to do before getting to the finish line: +1. Reviews from other contributors and maintainers +2. Update code and, after maintainer approval, merge-in changes to the `main` branch +3. Once PR is merged, it will be released and added to the next version Release Notes with a link for anyone to look at the PR and understand it. + +Some tips and advice: +- **Connect the dots and leave a breadcrumb**: link to related Issues, Forum discussions, etc. Help others follow the trail leading up to this PR. +- **A Helpful Description**: What does the code in the PR do and what problem does it solve? How can someone use the code? Code sample, Screenshot, Quick Video… Any or all of this is so so good. +- **Draft or Work in Progress**: You don’t have to finish the code to open a PR. Once you have a start, open it up! Most often the best way to move an Issue forward is to see the code in action. Also, often this helps identify ways forward before you spend a lot of time polishing. +- **Questions, Items for Discussion, Etc.**: Another reason to open a Draft PR is to ask questions and get direction via review. +- **Loop in a Maintainer for Feedback and Review**: ping someone with an `@`. And nudge again in a few days if there’s no reply. We appreciate it and truly don’t want the PR to get lost in the shuffle! +- **Next Steps**: Once the PR is merged, will there be a follow up step? If so, link to an Issue. How about Docs to-do or Docs to-merge? + +The best thing you can do is look through existing PRs, which will give you a feel for how things work and what you think is helpful. + +### Example PR +If you’re looking for an example of “what makes a good PR”, look no further than this one by Kim-Adeline: +- [Convert component generator to TS #632](https://github.com/redwoodjs/redwood/pull/632) + +Not every PR needs this much information. But it’s definitely helpful when it does! 
diff --git a/docs/versioned_docs/version-7.0/contributing-walkthrough.md b/docs/versioned_docs/version-7.0/contributing-walkthrough.md new file mode 100644 index 000000000000..89560c3907db --- /dev/null +++ b/docs/versioned_docs/version-7.0/contributing-walkthrough.md @@ -0,0 +1,251 @@ +--- +title: Contributing Walkthrough +description: Watch a video of the contributing process +--- + +# Contributing: Step-by-Step Walkthrough (with Video) + +> ⚡️ **Quick Links** +> +> There are several contributing docs and references, each covering specific topics: +> +> 1. 🧭 [Overview and Orientation](contributing-overview.md) +> 2. 📓 [Reference: Contributing to the Framework Packages](https://github.com/redwoodjs/redwood/blob/main/CONTRIBUTING.md) +> 3. 🪜 **Step-by-step Walkthrough** (👈 you are here) +> 4. 📈 [Current Project Status: v1 Release Board](https://github.com/orgs/redwoodjs/projects/6) +> 5. 🤔 What should I work on? +> - ["Help Wanted" v1 Triage Board](https://redwoodjs.com/good-first-issue) +> - [Discovery Process and Open Issues](contributing-overview.md#what-should-i-work-on) + + +## Video Recording of Complete Contributing Process +The following recording is from a Contributing Workshop, following through the exact steps outlined below. The Workshop includes additional topics along with Q&A discussion. + +<iframe + class="w-full" + style={{ height: '24rem' }} + src="https://www.youtube.com/embed/aZs_9g-5Ms8" + frameborder="0" + allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture; modestbranding; showinfo=0; fullscreen" +></iframe> + +## Prologue: Getting Started with Redwood and GitHub (and git) +These are the foundations for contributing, which you should be familiar with before starting the walkthrough. + +[**The Redwood Tutorial**](tutorial/foreword.md) + +The best (and most fun) way to learn Redwood and the underlying tools and technologies. + +**Docs and How To** + +- Start with the [Introduction](https://github.com/redwoodjs/redwood/blob/main/README.md) Doc +- And browse through [How To's](how-to/index) + +### GitHub (and Git) +Diving into Git and the GitHub workflow can feel intimidating if you haven’t experienced it before. The good news is there’s a lot of great material to help you learn and be committing in no time! + +- [Introduction to GitHub](https://lab.github.com/githubtraining/introduction-to-github) (overview of concepts and workflow) +- [First Day on GitHub](https://lab.github.com/githubtraining/first-day-on-github) (including Git) +- [First Week on GitHub](https://lab.github.com/githubtraining/first-week-on-github) (parts 3 and 4 might be helpful) + +## The Full Workflow: From Local Development to a New PR + +### Definitions +#### Redwood “Project” +We refer to the codebase of a Redwood application as a Project. This is what you install when you run `yarn create redwood-app <path-to-directory>`. It’s the thing you are building with Redwood. + +Lastly, you’ll find the template used to create a new project (when you run create redwood-app) here in GitHub: [redwoodjs/redwood/packages/create-redwood-app/template/](https://github.com/redwoodjs/redwood/tree/main/packages/create-redwood-app/template) + +We refer to this as the **CRWA Template or Project Template**. + +#### Redwood “Framework” +The Framework is the codebase containing all the packages (and other code) that is published on NPMjs.com as `@redwoodjs/<package-name>`. 
The Framework repository on GitHub is here: [https://github.com/redwoodjs/redwood](https://github.com/redwoodjs/redwood) + +### Development tools +These are the tools used and recommended by the Core Team. + +**VS Code** +[Download VS Code](https://code.visualstudio.com/download) +This has quickly become the de facto editor for JavaScript and TypeScript. Additionally, we have added recommended VS Code Extensions to use when developing both the Framework and a Project. You’ll see a pop-up window asking you about installing the extensions when you open up the code. + +**GitHub Desktop** +[Download GitHub Desktop](https://desktop.github.com) +You’ll need to be comfortable using Git at the command line. But the thing we like best about GitHub Desktop is how easy it makes workflow across GitHub -- GitHub Desktop -- VS Code. You don’t have to worry about syncing permissions or finding things. You can start from a repo on GitHub.com and use Desktop to do everything from “clone and open on your computer” to returning back to the site to “open a PR on GitHub”. + +**[Mac OS] iTerm and Oh-My-Zsh** +There’s nothing wrong with Terminal (on Mac) and plain zsh or bash. (If you’re on Windows, we highly recommend using Git for Windows and Git bash.) But we enjoy using iTerm2 ([download](https://iterm2.com)) and zsh much more (combined with [Oh My Zsh](https://ohmyz.sh)). Heads up, you can get lost in the world of theming and adding plugins. We recommend keeping it simple for awhile before taking the customization deep dive +😉. + +**[Windows] Git for Windows with Git Bash or WSL(2)** +Unfortunately, there are a lot of “gotchas” when it comes to working with Javascript-based frameworks on Windows. We do our best to point out (and resolve) issues, but our priority focus is on developing a Redwood app vs contributing to the Framework. (If you’re interested, there’s a lengthy Forum conversation about this with many suggestions.) + +All that said, we highly recommend using one of the following setups to maximize your workflow: +1. Use [Git for Windows and Git Bash](how-to/windows-development-setup.md) (included in installation) +2. Use [WSL following this setup guide on the Forums](https://community.redwoodjs.com/t/windows-subsystem-for-linux-setup/2439) + +Lastly, the new Gitpod integration is a great option and only getting better. You might just want to start using it from the beginning (see section below in “Local Development Setup”). + +**Gitpod** +We recently added an integration with [Gitpod](http://gitpod.io) that automatically creates a Framework dev workspace, complete with test project, in a browser-based VS Code environment. It’s pretty amazing and we highly recommend giving it a shot. (If you’re developing on Windows, it’s also an amazing option for you anytime you run into something that isn’t working correctly or supported.) + +But don’t skip out reading the following steps in “Local Development Setup” — Gitpod uses the same workflow and tools to initialize. If you want to develop in Gitpod, you’ll need to understand how it all works. + +But when you’re ready, learn how to use it in the section at the end [“GitPod: Browser-based Development”](#gitpod-browser-based-development). + +### Local Development Setup +#### Step 1: Redwood Framework +1. **Fork the [Redwood Framework](https://github.com/redwoodjs/redwood)** into a personal repo +2. Using GitHub Desktop, **open the Framework Codebase** in a VS Code workspace +3. 
Commands to “**start fresh**” when working on the Framework + - `yarn install`: This installs the package dependencies in /node_modules using Yarn package manager. This command is the same as just typing `yarn`. Also, if you ever switch branches and want to make sure the install dependencies are correct, you can run `yarn install --force` (shorthand `yarn -f`). + - `git clean -fxd`: *You’ll only need to do this if you’ve already been developing and want to “start over” and reset your codebase*. This command will permanently delete everything that is .gitignored, e.g. /node_modules and /dist directories with package builds. When switching between branches, this command makes sure nothing is carried over that you don’t want. (Warning: it will delete .env files in a Redwood Project. To avoid this, you can use `git clean -fxd -e .env`.) +4. **Create a new branch** from the `main` branch +First make sure you’ve pulled all changes from the remote origin (GitHub repo) into your local branch. (If you just cloned from your fork, you should be up to date.) Then create a new branch. The nomenclature used by David Price is `<davids_initials>-description-with-hyphens`, e.g. `dsp-add-eslint-config-redwood-toml`. It's simple to use VS Code or GitHub Desktop to manage branches. You can also do this via the CLI git checkout command. + +#### Step 2: Test Project +There are several options for creating a local Redwood Project to use during development. Anytime you are developing against a test project, there are some specific gotchas to keep in mind: +- New projects always use the latest stable version of the Redwood packages, which will not be up to date with the latest Framework code in the `main` branch. +- To use the packages corresponding with the latest code in the Framework `main` branch, you can use the canary version published to NPM. All you need to do to install the canary versions is run `yarn rw upgrade --tag canary` in your Project +- Using a cloned project or repo? Just know there are likely breaking changes in `main` that haven’t been applied. You can examine merged PRs with the “breaking” label for more info. +- Just because you are using canary doesn’t mean you are using your local Framework branch code! Make sure you run `yarn rwfw project:sync`. And anytime you switch branches or get out of sync, you might need to start over beginning with the `git clean -fxd` command + +With those details out of the way, now is the time to choose an option below that meets your needs based on functionality and codebase version. + +**Build a Functional Test Project [Recommended]** +1. 👉 **Use the build script to create a test project**: From the Framework root directory, run `yarn build:test-project <path/to/directory>`. This command installs a new project using the Template codebase from your current Framework branch, it then adds Tutorial features, and finally it initializes the DB (with seed data!). It should work 90% of the time and is the recommended starting place. We also use this out-of-the-box with Gitpod. + +**Other Options to create a project** + +2. **Install a fresh project using the local Framework template code:** Sometimes you need to create a project that uses the Template codebase in your local branch of the Framework, e.g. your changes include modifications to the CRWA Template and need to be tested. Running the command above is exactly the same as `yarn create redwood- app …`, only it runs the command from your local Framework package using the local Template codebase. 
Note: this is the same command used at the start of the `yarn build:test-project` command. +``` +yarn babel-node packages/create-redwood-app/src/create-redwood-app.js <path/to/project> +``` + +3. **Clone the Redwood Tutorial App repo:** This is the codebase to use when starting the Redwood Tutorial Part 2. It is updated to the latest version and has the Blog features. This is often something we use for local development. Note: be sure to upgrade to canary and look out for breaking changes coming with the next release. + + +4. **Install a fresh project**: `yarn create redwood-app <path/to/project>` If you just need a fresh installation 1) using the latest version template codebase and 2) without any features, then just install a new Redwood project. Note: this can have the same issues regarding the need to upgrade to canary and addressing breaking changes (see Notes from items 2 and 3 above). + +> Note: All the options above currently set the language to JavaScript. If you would like to work with TypeScript, you can add the option `--typescript` to either of the commands that run the create-redwood-app installation. + +#### Step 3: Link the local Framework with the local test Project +Once you work on the Framework code, you’ll most often want to run the code in a Redwood app for testing. However, the Redwood Project you created for testing is currently using the latest version (or canary) packages of Redwood published on NPMjs.com, e.g. [@redwoodjs/core](https://www.npmjs.com/package/@redwoodjs/core) + +So we’ll use the Redwood Framework (rwfw) command to connect our local Framework and test Projects, which allows the Project to run on the code for Packages we are currently developing. + +Run this command from the CLI in your test Project: +``` +RWFW_PATH=<framework directory> yarn rwfw project:sync +``` + +For Example: +``` +cd redwood-project +RWFW_PATH=~/redwood yarn rwfw project:sync +``` + +RWFW_PATH is the path to your local copy of the Redwood Framework. _Once provided to rwfw, it'll remember it and you shouldn't have to provide it again unless you move it._ + +> **Heads up for Windows Devs** +> Depending on your dev setup, Windows might balk at you setting the env var RWFW_PATH at the beginning of the command like this. If so, try prepending with `cross-env`, e.g. `yarn cross-env RWFW_PATH=~/redwood yarn rwfw` ... Or you can add the env var and value directly to your shell before running the command. + +As project:sync starts up, it'll start logging to the console. In order, it: +1. cleans and builds the framework +2. copies the framework's dependencies to your project +3. runs yarn install in your project +4. copies over the framework's packages to your project +5. waits for changes + +Step two is the only explicit change you'll see to your project. You'll see that a ton of packages have been added to your project's root package.json. + +All done? You’re ready to kill the link process with “ctrl + c”. You’ll need to confirm your root package.json no longer has the added dependencies. And, if you want to reset your test-project, you should run `yarn install --force`. + +#### Step 4: Framework Package(s) Local Testing +Within your Framework directory, use the following tools and commands to test your code: +1. **Build the packages**: `yarn build` + - to delete all previous build directories: yarn build:clean +2. **Syntax and Formatting**: `yarn lint` + - to fix errors or warnings: `yarn lint:fix` +3. **Run unit tests for each package**: `yarn test` +4. 
**Run through the Cypress E2E integration tests**: `yarn e2e` +5. **Check Yarn resolutions and package.json format**: `yarn check` + +All of these checks are included in Redwood’s GitHub PR Continuous Integration (CI) automation. However, it’s good practice to understand what they do by using them locally. The E2E tests aren’t something we use every time anymore (because it takes a while), but you should learn how to use it because it comes in handy when your code is failing tests on GitHub and you need to diagnose. + +> **Heads up for Windows Devs** +> The Cypress E2E does *not* work on Windows. Two options are available if needed: +> 1. Use Gitpod (see related section for info) +> 2. When you create a PR, just ask for help from a maintainer + +#### Step 5: Open a PR 🚀 +You’ve made it to the fun part! It’s time to use the code you’re working on to create a new PR into the Redwood Framework `main` branch. + +We use GitHub Desktop to walk through the process of: +- Committing my changes to my development branch +- Publishing (pushing) my branch and changes to my GitHub repo fork of the Redwood Framework +- Opening a PR requesting to merge my forked-repo branch into the Redwood Framework `main` branch + +> While we use GitHub Desktop as an example, the basic process outlined above is the same whether using the command line or other clients. + +1. **Commit Files:** Using GitHub Desktop, browse to your local Redwood Framework repository and select the current branch you're working on. On the left-hand side, you'll see the files that have been modified, added, or removed. Check the boxes for the files you want to include in the PR. Below the file list, you'll see two text boxes and a "Commit to <your-branch-name>" button. Write a short commit message in the first box. If you want to add a longer description then you can do so in the second box. Click the "Commit to ..." button to commit the changes to the branch. The files are now committed under that commit message. + +2. **Push Files:** After committing, you should see an option appear with the count of local commits and a button to "Push origin." If you're ready to push those changes to the remote branch, click that button. Otherwise, you can keep working and add more commits using the process in step 1. + +3. **Create Pull Request:** Once the commit(s) have been pushed, you should see another option for "Create Pull Request." This will open a browser window to GitHub's "Open a pull request" form. Fill out the appropriate information, check the box to "Allow edits by maintainers," and submit! + +> If you are following along and are not using GitHub Desktop, after pushing your commits, you can open a pull request by visiting [github.com](https://github.com) and browsing to your fork. There should be a button at the top to submit a pull request. + +You have successfully submitted your PR! + +**Note:** Make sure you check the box that allows project maintainers to update your branch. This option is found on the "Open a pull request" form below the description textbox. Checking this option helps move a PR forward more quickly, as branches always need to be updated from `main` before we can merge. + +**When is my code “ready” to open a PR?** +Most of the action, communication, and decisions happen within a PR. A common mistake new contributors make is *waiting* until their code is “perfect” before opening a PR. 
Assuming your PR has some code changes, it’s great practice to open a [Draft PR](https://github.blog/2019-02-14-introducing-draft-pull-requests/) (setting during the PR creation), which you can use to start discussion and ask questions. PRs are closed all the time without being merged, often because they are replaced by another PR resulting from decisions and discussion. It’s part of the process. More importantly, it means collaboration is happening! + +What isn’t a fun experience is spending a whole bunch of time on code that ends up not being the correct direction or is unnecessary/redundant to something that already exists. This is a part of the learning process. But it’s another reason to open a draft PR sooner than later to get confirmation and questions out of the way before investing time into refining and details. + +When in doubt, just try first and ask for help and direction! + +Refer to the [What makes for a good Pull Request?](contributing-overview.md#what-makes-for-a-good-pull-request) section in [Contributing Overview](contributing-overview.md)for general good practices when opening PR. + +### Gitpod: Browser-based Development +[Gitpod](http://gitpod.io) has recently been integrated with Redwood to JustWork™ with any branch or PR. When a virtual Gitpod workspace is initialized, it automatically: +1. Checks-out the code from your branch or PR +2. Run Yarn installation +3. Creates the functional Test Project via `yarn build:test-project` +4. Syncs the Framework code with the Test Project +5. Starts the Test Project dev server +6. 🤯 + +> **Chrome works best** +> We’ve noticed some bugs using Gitpod with either Brave or Safari. Currently we recommend sticking to Chrome (although it’s worth trying out Edge and Firefox). + +**Demo of Gitpod** +David briefly walks-through an automatically prebuilt Gitpod workspace here: +- [Gitpod + RedwoodJS 3-minute Walkthrough](https://youtu.be/_kMuTW3x--s) + +Make sure you watch until the end where David shows how to set up your integration with GitHub and VS Code sync. 🤩 + +**Start a Gitpod Workspace** +There are two ways to get started with Gitpod + Redwood. + +*Option 1: Open a PR* +Every PR will trigger a Gitpod prebuild using the PR branch. Just look for Gitpod in the list of checks at the bottom of the PR — click the “Details” link and away you’ll go! + +<img width="350" alt="PR Checks" src="https://user-images.githubusercontent.com/2951/151928088-58e26232-b752-4471-adf4-a2bc59b79ac8.png" /> + +*Option 2: Use the link from your project or branch* + +You can initialize a workspace using this URL pattern: + +``` +https://gitpod.io/#<URL for branch or project> +``` + +For example, this link will start a workspace using the RedwoodJS main branch: +- https://gitpod.io/#https://github.com/redwoodjs/redwood + +And this link will start a workspace for a PR #3434: +- https://gitpod.io/#https://github.com/redwoodjs/redwood/pull/3434 + + diff --git a/docs/versioned_docs/version-7.0/cors.md b/docs/versioned_docs/version-7.0/cors.md new file mode 100644 index 000000000000..5325cedc2ba0 --- /dev/null +++ b/docs/versioned_docs/version-7.0/cors.md @@ -0,0 +1,263 @@ +--- +title: Cross-Origin Resource Sharing +description: For when you need to worry about CORS +--- + +# CORS + +CORS stands for [Cross Origin Resource Sharing](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS). In a nutshell, by default, browsers aren't allowed to access resources outside their own domain. 
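+
+Sharing only happens when the server explicitly opts in via response headers. As a simplified sketch, a cross-origin request from `https://www.example.com` is only let through by the browser if the api responds with headers along these lines (the credentials header matters only when cookies are involved):
+
+```
+Access-Control-Allow-Origin: https://www.example.com
+Access-Control-Allow-Credentials: true
+```
+
+The sections below show how to get Redwood to send these headers for you.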
+ +## When you need to worry about CORS + +If your api and web sides are deployed to different domains, you'll have to worry about CORS. For example, if your web side is deployed to `example.com` but your api is `api.example.com`. For security reasons your browser will not allow XHR requests (like the kind that the GraphQL client makes) to a domain other than the one currently in the browser's address bar. + +This will become obvious when you point your browser to your site and see none of your GraphQL data. When you look in the web inspector you'll see a message along the lines of: + +> ⛔️ Access to fetch https://api.example.com has been blocked by CORS policy: Response to preflight request doesn't pass access control check: No 'Access-Control-Allow-Origin' header is present on the requested resource. + +## Avoiding CORS + +Dealing with CORS can complicate your app and make it harder to deploy to new hosts, run in different environments, etc. Is there a way to avoid CORS altogether? + +Yes! If you can add a proxy between your web and api sides, all requests will *appear* to be going to and from the same domain (the web side, even though behind the scenes they are forwarded somewhere else). This functionality is included automatically with hosts like [Netlify](https://docs.netlify.com/routing/redirects/rewrites-proxies/#proxy-to-another-service) or [Vercel](https://vercel.com/docs/cli#project-configuration/rewrites). With a host like [Render](https://render-web.onrender.com/docs/deploy-redwood#deployment) you can enable a proxy with a simple config option. Most providers should provide this functionality through a combination of provider-specific config and/or web server configuration. + +## GraphQL Config + +You'll need to add CORS headers to GraphQL responses. You can do this easily enough by adding the `cors` option in `api/src/functions/graphql.js` (or `graphql.ts`): + +```diff +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, ++ cors: { ++ origin: 'https://www.example.com', // <-- web side domain ++ }, + onException: () => { + db.$disconnect() + }, +}) +``` + +Note that the `origin` needs to be a complete URL including the scheme (`https`). This is the domain that requests are allowed to come *from*. In this example we assume the web side is served from `https://www.example.com`. If you have multiple servers that should be allowed to access the api, you can pass an array of them instead: + +```jsx +cors: { + origin: ['https://example.com', 'https://www.example.com'] +}, +``` + +The proper one will be included in the CORS header depending on where the response came from. + +## Authentication Config + +The following config only applies if you're using [dbAuth](authentication.md#self-hosted-auth-installation-and-setup), which is Redwood's own cookie-based auth system. + +You'll need to configure several things: + +* Add CORS config for GraphQL +* Add CORS config for the auth function +* Cookie config for the auth function +* Allow sending of credentials in GraphQL XHR requests +* Allow sending of credentials in auth function requests + +Here's how you configure each of these: + +### GraphQL CORS Config + +You'll need to add CORS headers to GraphQL responses, and let the browser know to send up cookies with any requests. 
Add the `cors` option in `api/src/functions/graphql.js` (or `graphql.ts`) with an additional `credentials` property: + +```diff +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, ++ cors: { ++ origin: 'https://www.example.com', // <-- web side domain ++ credentials: true, ++ }, + onException: () => { + db.$disconnect() + }, +}) +``` + +`origin` is the domain(s) that requests come *from* (the web side). + +### Auth CORS Config + +Similar to the `cors` options being sent to GraphQL, you can set similar options in `api/src/functions/auth.js` (or `auth.ts`): + +```diff +const authHandler = new DbAuthHandler(event, context, { + db: db, + authModelAccessor: 'user', + authFields: { + id: 'id', + username: 'email', + hashedPassword: 'hashedPassword', + salt: 'salt', + resetToken: 'resetToken', + resetTokenExpiresAt: 'resetTokenExpiresAt', + }, ++ cors: { ++ origin: 'https://www.example.com', // <-- web side domain ++ credentials: true, ++ }, + cookie: { + HttpOnly: true, + Path: '/', + SameSite: 'Strict', + Secure: true, + }, + forgotPassword: forgotPasswordOptions, + login: loginOptions, + resetPassword: resetPasswordOptions, + signup: signupOptions, +}) +``` + +Just like the GraphQL config, `origin` is the domain(s) that requests come *from* (the web side). + +### Cookie Config + +In order to be able accept cookies from another domain we'll need to make a change to the `SameSite` option in `api/src/functions/auth.js` and set it to `None`: + +```jsx {4} + cookie: { + HttpOnly: true, + Path: '/', + SameSite: 'None', + Secure: true, + }, +``` + +### GraphQL XHR Credentials + +Next we need to tell the GraphQL client to include credentials (the dbAuth cookie) in any requests. This config goes in `web/src/App.{ts,js}`: + +```jsx {7-12} +import { AuthProvider, useAuth } from 'src/auth' + +const App = () => ( + <FatalErrorBoundary page={FatalErrorPage}> + <RedwoodProvider titleTemplate="%PageTitle | %AppTitle"> + <AuthProvider type="dbAuth"> + <RedwoodApolloProvider + useAuth={useAuth} + graphQLClientConfig={{ + httpLinkConfig: { credentials: 'include' }, + }} + > + <Routes /> + </RedwoodApolloProvider> + </AuthProvider> + </RedwoodProvider> + </FatalErrorBoundary> +) +``` + +### Auth XHR Credentials + +Finally, we need to tell dbAuth to include credentials in its own XHR requests. We'll do this within `web/src/auth.{ts,js}` when creating the `AuthProvider`: + +```jsx {3-5} +import { createDbAuthClient, createAuth } from '@redwoodjs/auth-dbauth-web' + +const dbAuthClient = createDbAuthClient({ + fetchConfig: { credentials: 'include' }, +}) + +export const { AuthProvider, useAuth } = createAuth(dbAuthClient) +``` + +## Testing CORS Locally + +If you've made the configuration changes above, `localhost` testing should continue working as normal. But, if you want to make sure your CORS config works without deploying to the internet somewhere, you'll need to do some extra work. + +### Serving Sides to the Internet + +First, you need to get the web and api sides to be serving from different hosts. A tool like [ngrok](https://ngrok.com/) or [localhost.run](https://localhost.run/) allows you to serve your local development environment over a real domain to the rest of the internet (on both `http` and `https`). 
+ +You'll need to start two tunnels, one for the web side (this example assumes ngrok): + +```bash +> ngrok http 8910 + +Session Status online +Account Your Name (Plan: Pro) +Version 2.3.40 +Region United States (us) +Web Interface http://127.0.0.1:4040 +Forwarding http://3c9913de0c00.ngrok.io -> http://localhost:8910 +Forwarding https://3c9913de0c00.ngrok.io -> http://localhost:8910 +``` + +And another for the api side: + +```bash +> ngrok http 8911 + +Session Status online +Account Your Name (Plan: Pro) +Version 2.3.40 +Region United States (us) +Web Interface http://127.0.0.1:4040 +Forwarding http://fb6d701c44b5.ngrok.io -> http://localhost:8911 +Forwarding https://fb6d701c44b5.ngrok.io -> http://localhost:8911 +``` + +Note the two different domains. Copy the `https` domain from the api side because we'll need it in a moment. Even if the Redwood dev server isn't running you can leave these tunnels running, and when the dev server *does* start, they'll just start on those domains again. + +### `redwood.toml` Config + +You'll need to make two changes here: + +1. Bind the server to all network interfaces +2. Point the web side to the api's domain + +Normally the dev server only binds to `127.0.0.1` (home sweet home) which means you can only access it from your local machine using `localhost` or `127.0.0.1`. To tell it to bind to all network interfaces, and to be available to the outside world, add this `host` option: + +```toml {4} +[web] + title = "Redwood App" + port = 8910 + host = '0.0.0.0' + apiUrl = '/.redwood/functions' + includeEnvironmentVariables = [] +[api] + port = 8911 +[browser] + open = true +``` + +We'll also need to tell the web side where the api side lives. Update the `apiUrl` to whatever domain your api side is running on (remember the domain you copied from from ngrok): + +```toml {5} +[web] + title = "Redwood App" + port = 8910 + host = '0.0.0.0' + apiUrl = 'https://fb6d701c44b5.ngrok.io' + includeEnvironmentVariables = [] +[api] + port = 8911 +[browser] + open = true +``` + +Where you get this domain from will depend on how you expose your app to the outside world (this example assumes ngrok). + +### Starting the Dev Server + +You'll need to apply an option when starting the dev server to tell it to accept requests from any host, not just `localhost`: + +```bash +> yarn rw dev --fwd="--allowed-hosts all" +``` + +### Wrapping Up + +Now you should be able to open the web side's domain in a browser and use your site as usual. Test that GraphQL requests work, as well as authentication if you are using dbAuth. diff --git a/docs/versioned_docs/version-7.0/create-redwood-app.md b/docs/versioned_docs/version-7.0/create-redwood-app.md new file mode 100644 index 000000000000..61e2d15b5dfe --- /dev/null +++ b/docs/versioned_docs/version-7.0/create-redwood-app.md @@ -0,0 +1,103 @@ +--- +slug: create-redwood-app +description: Instructions and usage examples for Create Redwood App +--- + +# Create Redwood App + +To get up and running with Redwood, you can use Create Redwood App: + +```terminal +yarn create redwood-app <your-app-name> +``` + +## Set up for success +Redwood requires that you're running Node version 20 or higher. + +If you're running Node version 21.0.0 or higher, you can still use Create Redwood App, but it may make your project incompatible with some deploy targets, such as AWS Lambdas. 
+ +To see what version of Node you're running, you can run the following command in your terminal: + +```terminal +node -v +``` + +If you need to update your version of Node or run multiple versions of Node, we recommend installing nvm and have [documentation about how to get up and running.](./how-to/using-nvm) + +You also need to have yarn version 1.22.21 or higher installed. To see what version of yarn you're running, you can run the following command in your terminal: + +```terminal +yarn -v +``` + +To upgrade your version of yarn, [you can refer to the yarn documentation](https://yarnpkg.com/getting-started/install). + +## What you can expect + +### Select your preferred language +Options: TypeScript (default) or JavaScript + +If you choose JavaScript, you can always [add TypeScript later](/docs/typescript/introduction#converting-a-javascript-project-to-typescript). + +### Do you want to initialize a git repo? +Options: yes (default) or no + +If you mark "yes", then it will ask you to **Enter a commit message**. The default message is "Initial commit." + +You can always initialize a git repo later and add a commit message by running the following commands in your terminal: + +```terminal +cd <your-app-name> +git init +git add . +git commit -m "Initial commit" +``` + +If you're new to git, here's a recommended playlist on YouTube: [git for Beginners](https://www.youtube.com/playlist?list=PLrz61zkUHJJFmfTgOVL1mBw_NZcgGe882) + +### Do you want to run `yarn install`? +Options: yes (default) or no + +_NOTE: This prompt will only display if you're running yarn, version 1._ + +This command will download all of your project's dependencies. + +If you mark "no", you can always run this command later: + +```terminal +cd <your-app-name> +yarn install +``` + +## Running the development server + +Once the Create Redwood app has finished running, you can start your development server by running the following command: + +```terminal +cd <your-app-name> +yarn rw dev +``` + +- This will start your development server at `http://localhost:8910`. +- Your API will be available at `http://localhost:8911`. +- You can visit the Redwood GraphQL Playground at `http://localhost:8911/graphql`. + +## Flags +You can by pass these prompts by using the following flags: + +| Flag | Alias | What it does | +| :--- | :--- | :--- | +| `--yarn-install` | | Run `yarn install` | +| `--typescript` | `ts` | Set TypeScript as the preferred language (pass `--no-typescript` to use JavaScript) | +| `--overwrite` | | Overwrites the existing directory, if it has the same name | +| `--git-init` | `git` | Initializes a git repository | +| `--commit-message "Initial commit"` | `m` | Specifies the initial git commit message | +| `--yes` | `y` | Automatically select all defaults | + +For example, here's the project with all flags enabled: + +```terminal +yarn create redwood-app <your-app-name> --typescript --git-init --commit-message "Initial commit" --yarn-install +``` + + diff --git a/docs/versioned_docs/version-7.0/custom-web-index.md b/docs/versioned_docs/version-7.0/custom-web-index.md new file mode 100644 index 000000000000..8fd30f548856 --- /dev/null +++ b/docs/versioned_docs/version-7.0/custom-web-index.md @@ -0,0 +1,51 @@ +--- +description: Change how App mounts to the DOM +--- + +# Custom Web Index + +:::warning This doc only applies to projects using Webpack + +As of v6, all Redwood projects use Vite by default. 
+When switching projects to Vite, we made the decision to add the entry file, `web/src/entry.client.{jsx,tsx}`, back to projects.
+
+If you're using Webpack, this is all still applicable—keep reading.
+
+:::
+
+You may have noticed that there's no call to `ReactDOM.render` in your Redwood app.
+That's because Redwood automatically mounts the `App` component in `web/src/App.js` to the DOM.
+But if you need to customize how this happens, you can provide a file named `index.js` in `web/src` and Redwood will use that instead.
+
+## Setup
+
+To make this easy, there's a setup command that'll give you the file you need where you need it:
+
+```
+yarn rw setup custom-web-index
+```
+
+This generates a file named `index.js` in `web/src` that looks like this:
+
+```jsx title="web/src/index.js"
+import { hydrateRoot, createRoot } from 'react-dom/client'
+
+import App from './App'
+/**
+ * When `#redwood-app` isn't empty then it's very likely that you're using
+ * prerendering. So React attaches event listeners to the existing markup
+ * rather than replacing it.
+ * https://reactjs.org/docs/react-dom.html#hydrate
+ */
+const rootElement = document.getElementById('redwood-app')
+
+if (rootElement.hasChildNodes()) {
+  hydrateRoot(rootElement, <App />)
+} else {
+  const root = createRoot(rootElement)
+  root.render(<App />)
+}
+```
+
+This is actually the same file Redwood uses [internally](https://github.com/redwoodjs/redwood/blob/main/packages/web/src/entry/index.js).
+So even if you don't customize anything, things still work the way they did.
diff --git a/docs/versioned_docs/version-7.0/data-migrations.md b/docs/versioned_docs/version-7.0/data-migrations.md
new file mode 100644
index 000000000000..3f7d32d389fd
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/data-migrations.md
@@ -0,0 +1,159 @@
+---
+description: Track changes to database content
+---
+
+# Data Migrations
+
+> Data Migrations are available as of RedwoodJS v0.15
+
+There are two kinds of changes you can make to your database:
+
+* Changes to structure
+* Changes to content
+
+In Redwood, [Prisma Migrate](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-migrate) takes care of codifying changes to your database *structure* in code by creating a snapshot of changes to your database that can be reliably repeated to end up in some known state.
+
+To track changes to your database *content*, Redwood includes a feature we call **Data Migration**. As your app evolves and you move data around, you need a way to consistently declare how that data should move.
+
+Imagine a `User` model that contains several columns for user preferences. Over time, you may end up with more and more preferences to the point that you have more preference-related columns in the table than you do data unique to the user! This is a common occurrence as applications grow. You decide that the app should have a new model, `Preference`, to keep track of them all (and `Preference` will have a foreign key `userId` to reference it back to its `User`). You'll use Prisma Migrate to create the new `Preference` model, but how do you copy the preference data to the new table? Data migrations to the rescue!
+
+## Installing
+
+Just like Prisma, we will store which data migrations have run in the database itself. We'll create a new database table `DataMigration` to keep track of which ones have run already.
+ +Rather than create this model by hand, Redwood includes a CLI tool to add the model to `schema.prisma` and create the DB migration that adds the table to the database: +``` +yarn rw data-migrate install +``` +You'll see a new directory created at `api/db/dataMigrations` which will store our individual migration tasks. + +Take a look at `schema.prisma` to see the new model definition: + +```jsx title="api/db/schema.prisma" +model RW_DataMigration { + version String @id + name String + startedAt DateTime + finishedAt DateTime +} +``` + +The install script also ran `yarn rw prisma migrate dev --create-only` automatically so you have a DB migration ready to go. You just need to run the `prisma migrate dev` command to apply it: +``` +yarn rw prisma migrate dev +``` +## Creating a New Data Migration + +Data migrations are just plain Typescript or Javascript files which export a single anonymous function that is given a single argument—an instance of `PrismaClient` called `db` that you can use to access your database. The files have a simple naming convention: +``` +{version}-{name}.js +``` +Where `version` is a timestamp, like `20200721123456` (an ISO8601 datetime without any special characters or zone identifier), and `name` is a param-case human readable name for the migration, like `copy-preferences`. + +To create a data migration we have a generator: +``` +yarn rw generate dataMigration copyPreferences +``` +This will create `api/db/dataMigrations/20200721123456-copy-preferences.js`: + +```jsx title="api/db/dataMigrations/20200721123456-copy-preferences.js" +export default async ({ db }) => { + // Migration here... +} +``` + +> **Why such a long name?** +> +> So that if multiple developers are creating data migrations, the chances of them creating one with the exact same filename is essentially zero, and they will all run in a predictable order—oldest to newest. + +Now it's up to you to define your data migration. In our user/preference example, it may look something like: + +```jsx title="api/db/dataMigrations/20200721123456-copy-preferences.js" +const asyncForEach = async (array, callback) => { + for (let index = 0; index < array.length; index++) { + await callback(array[index], index, array) + } +} + +export default async ({ db }) => { + const users = await db.user.findMany() + + asyncForEach(users, async (user) => { + await db.preference.create({ + data: { + newsletter: user.newsletter, + frequency: user.frequency, + theme: user.theme, + user: { connect: { id: user.id } } + } + }) + }) +} +``` + +This loops through each existing `User` and creates a new `Preference` record containing each of the preference-related fields from `User`. + +> Note that in a case like this where you're copying data to a new table, you would probably delete the columns from `User` afterwards. This needs to be a two step process! +> +> 1. Create the new table (db migration) and then move the data over (data migration) +> 2. Remove the unneeded columns from `User` +> +> When going to production, you would need to run this as two separate deploys to ensure no data is lost. +> +> The reason is that all DB migrations are run and *then* all data migrations. So if you had two DB migrations (one to create `Preference` and one to drop the unneeded columns from `User`) they would both run before the Data Migration, so the columns containing the preferences are gone before the data migration gets a chance to copy them over! 
+>
+> **Remember**: Any destructive action on the database (removing a table or column especially) needs to be a two step process to avoid data loss.
+
+## Running a Data Migration
+
+When you're ready, you can execute your data migration with `data-migrate`'s `up` command:
+```
+yarn rw data-migrate up
+```
+This goes through each file in `api/db/dataMigrations`, compares it against the list of migrations that have already run according to the `DataMigration` table in the database, and executes any that aren't present in that table, sorted oldest to newest based on the timestamp in the filename.
+
+Any logging statements (like `console.info()`) you include in your data migration script will be output to the console as the script is running.
+
+If the script encounters an error, the process will abort, skipping any following data migrations.
+
+> The example data migration above didn't include this for brevity, but you should always run your data migration [inside a transaction](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/transactions#bulk-operations-experimental) so that if any errors occur during execution the database will not be left in an inconsistent state where only *some* of your changes were performed.
+
+## Long-term Maintainability
+
+Ideally you can run all database migrations and data migrations from scratch (like when a new developer joins the team) and have them execute correctly. Unfortunately you don't get that ideal scenario by default.
+
+Take our example above—what happens when a new developer comes along and attempts to set up their database? All DB migrations will run first (including the one that drops the preference-related columns from `User`) before the data migrations run. They will get an error when they try to read something like `user.newsletter` and that column doesn't exist!
+
+One technique to combat this is to check for the existence of these columns before the data migration does anything. If `user.newsletter` doesn't exist, then don't bother running the data migration at all and assume that your [seed data](cli-commands.md#prisma-db-seed) is already in the correct format:
+
+```jsx {4,15}
+export default async ({ db }) => {
+  const users = await db.user.findMany()
+
+  if (users.length && typeof users[0].newsletter !== 'undefined') {
+    asyncForEach(users, async (user) => {
+      await db.preference.create({
+        data: {
+          newsletter: user.newsletter,
+          frequency: user.frequency,
+          theme: user.theme,
+          user: { connect: { id: user.id } }
+        }
+      })
+    })
+  }
+}
+```
+
+## Lifecycle Summary
+
+Run once:
+```
+yarn rw data-migrate install
+yarn rw prisma migrate dev
+```
+Run every time you need a new data migration:
+```
+yarn rw generate dataMigration migrationName
+yarn rw data-migrate up
+```
diff --git a/docs/versioned_docs/version-7.0/deploy/baremetal.md b/docs/versioned_docs/version-7.0/deploy/baremetal.md
new file mode 100644
index 000000000000..aec882f28381
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/deploy/baremetal.md
@@ -0,0 +1,797 @@
+---
+description: Have complete control by hosting your own code
+---
+
+# Introduction to Baremetal
+
+Once you've grown beyond the confines and limitations of the cloud deployment providers, it's time to get serious: hosting your own code on big iron. Prepare for performance like you've only dreamed of! Also be prepared for IT and infrastructure responsibilities like you've only had nightmares of.
+
+With Redwood's Baremetal deployment option, the source (like your dev machine) will SSH into one or more remote machines and execute commands in order to update your codebase, run any database migrations and restart services.
+
+Deploying from a client (like your own development machine) consists of running a single command:
+
+First time deploy:
+
+```bash
+yarn rw deploy baremetal production --first-run
+```
+
+Subsequent deploys:
+
+```bash
+yarn rw deploy baremetal production
+```
+
+:::warning Deploying to baremetal is an advanced topic
+
+If you haven't done any kind of remote server work before, you may be in a little over your head to start with. But don't worry: until relatively recently (cloud computing, serverless, lambda functions) this is how all websites were deployed, so we've got a good 30 years of experience getting this working!
+
+If you're new to connecting to remote servers, check out the [Intro to Servers](/docs/intro-to-servers) guide we wrote just for you.
+
+:::
+
+## Deployment Lifecycle
+
+The Baremetal deploy runs several commands in sequence. These can be customized, to an extent, and some of them skipped completely:
+
+1. `git clone --depth=1` to retrieve the latest code
+2. Symlink the latest deploy `.env` to the shared `.env` in the app dir
+3. `yarn install` - installs dependencies
+4. Runs prisma DB migrations
+5. Generate Prisma client libs
+6. Runs [data migrations](/docs/data-migrations)
+7. Builds the web and/or api sides
+8. Symlink the latest deploy dir to `current` in the app dir
+9. Restart the serving process(es)
+10. Remove older deploy directories
+
+### First Run Lifecycle
+
+If the `--first-run` flag is specified then step 9 above (restarting the serving processes) will execute the following commands instead:
+ - `pm2 start [service]` - starts the serving process(es)
+ - `pm2 save` - saves the running services to the deploy user's config file for future startup. See [Starting Processes on Server Restart](#starting-processes-on-server-restart) for further information
+
+## Directory Structure
+
+Once you're deployed and running, you'll find a directory structure that looks like this:
+
+```
+└── var
+    └── www
+        └── myapp
+            ├── .env <────────────────┐
+            ├── current ───symlink──┐ │
+            └── releases            │ │
+                └── 20220420120000 <┘ │
+                    ├── .env ─symlink─┘
+                    ├── api
+                    ├── web
+                    ├── ...
+```
+
+There's a symlink `current` pointing to a directory named for a timestamp (the timestamp of the last deploy) and within that is your codebase, the latest revision having been `clone`d. The `.env` file in that directory is then symlinked back out to the one in the root of your app path, so that it can be shared across deployments.
+
+So a reference to `/var/www/myapp/current` will always be the latest deployed version of your codebase. If you wanted to [set up nginx to serve your web side](#redwood-serves-api-nginx-serves-web-side), you would point it to `/var/www/myapp/current/web/dist` as the `root` and it will always be serving the latest code: a new deploy will change the `current` symlink and nginx will start serving the new files instantaneously.
+
+## App Setup
+
+Run the following to add the required config files to your codebase:
+
+```bash
+yarn rw setup deploy baremetal
+```
+
+This will add dependencies to your `package.json` and create two files:
+
+1. `deploy.toml` contains server config for knowing which machines to connect to and which commands to run
+2.
`ecosystem.config.js` for [PM2](https://pm2.keymetrics.io/) to know what service(s) to monitor + +If you see an error from `gyp` you may need to add some additional dependencies before `yarn install` will be able to complete. See the README for `node-type` for more info: https://github.com/nodejs/node-gyp#installation + +### Configuration + +Before your first deploy you'll need to add some configuration. + +#### ecosystem.config.js + +By default, baremetal assumes you want to run the `yarn rw serve` command, which provides both the web and api sides. The web side will be available on port 8910 unless you update your `redwood.toml` file to make it available on another port. The default generated `ecosystem.config.js` will contain this config only, within a service called "serve": + +```jsx title="ecosystem.config.js" +module.exports = { + apps: [ + { + name: 'serve', + cwd: 'current', + script: 'node_modules/.bin/rw', + args: 'serve', + instances: 'max', + exec_mode: 'cluster', + wait_ready: true, + listen_timeout: 10000, + }, + ], +} +``` + +If you follow our recommended config [below](#redwood-serves-api-nginx-serves-web-side), you could update this to only serve the api side, because the web side will be handled by [nginx](https://www.nginx.com/). That could look like: + +```jsx title="ecosystem.config.js" +module.exports = { + apps: [ + { + name: 'api', + cwd: 'current', + script: 'node_modules/.bin/rw', + args: 'serve api', + instances: 'max', + exec_mode: 'cluster', + wait_ready: true, + listen_timeout: 10000, + }, + ], +} +``` + +#### deploy.toml + +This file contains your server configuration: which servers to connect to and which commands to run on them. + +```toml title="deploy.toml" +[[production.servers]] +host = "server.com" +username = "user" +agentForward = true +sides = ["api","web"] +packageManagerCommand = "yarn" +monitorCommand = "pm2" +path = "/var/www/app" +processNames = ["serve"] +repo = "git@github.com:myorg/myapp.git" +branch = "main" +keepReleases = 5 +``` + +This lists a single server, in the `production` environment, providing the hostname and connection details (`username` and `agentForward`), which `sides` are hosted on this server (by default it's both web and api sides), the `path` to the app code and then which PM2 service names should be (re)started on this server. + +#### Config Options + +* `host` - hostname to the server +* `port` - [optional] ssh port for server connection, defaults to 22 +* `username` - the user to login as +* `password` - [optional] if you are using password authentication, include that here +* `privateKey` - [optional] if you connect with a private key, include the content of the key here, as a buffer: `privateKey: Buffer.from('...')`. Use this *or* `privateKeyPath`, not both. +* `privateKeyPath` - [optional] if you connect with a private key, include the path to the key here: `privateKeyPath: path.join('path','to','key.pem')` Use this *or* `privateKey`, not both. 
+* `passphrase` - [optional] if your private key contains a passphrase, enter it here +* `agentForward` - [optional] if you have [agent forwarding](https://docs.github.com/en/developers/overview/using-ssh-agent-forwarding) enabled, set this to `true` and your own credentials will be used for further SSH connections from the server (like when connecting to GitHub) +* `sides` - An array of sides that will be built on this server +* `packageManagerCommand` - The package manager bin to call, defaults to `yarn` but could be updated to be prefixed with another command first, for example: `doppler run -- yarn` +* `monitorCommand` - The monitor bin to call, defaults to `pm2` but could be updated to be prefixed with another command first, for example: `doppler run -- pm2` +* `path` - The absolute path to the root of the application on the server +* `migrate` - [optional] Whether or not to run migration processes on this server, defaults to `true` +* `processNames` - An array of service names from `ecosystem.config.js` which will be (re)started on a successful deploy +* `repo` - The path to the git repo to clone +* `branch` - [optional] The branch to deploy (defaults to `main`) +* `keepReleases` - [optional] The number of previous releases to keep on the server, including the one currently being served (defaults to 5) + +The easiest connection method is generally to include your own public key in the server's `~/.ssh/authorized_keys` mannually or by running `ssh-copy-id user@server.com` from your local machine, [enable agent forwarding](https://docs.github.com/en/developers/overview/using-ssh-agent-forwarding), and then set `agentForward = true` in `deploy.toml`. This will allow you to use your own credentials when pulling code from GitHub (required for private repos). Otherwise you can create a [deploy key](https://docs.github.com/en/developers/overview/managing-deploy-keys) and keep it on the server. + +#### Using Environment Variables in `deploy.toml` + +Similarly to `redwood.toml`, `deploy.toml` supports interpolation of environment variables. For more details on how to use the environment variable interpolation see [Using Environment Variables in redwood.toml](/docs/app-configuration-redwood-toml#using-environment-variables-in-redwoodtoml) + +#### Multiple Servers + +If you start horizontally scaling your application you may find it necessary to have the web and api sides served from different servers. The configuration files can accommodate this: + +```toml title="deploy.toml" +[[production.servers]] +host = "api.server.com" +username = "user" +agentForward = true +sides = ["api"] +path = "/var/www/app" +processNames = ["api"] + +[[production.servers]] +host = "web.server.com" +username = "user" +agentForward = true +sides = ["web"] +path = "/var/www/app" +migrate = false +processNames = ["web"] +``` + +```jsx title="ecosystem.config.js" +module.exports = { + apps: [ + { + name: 'api', + cwd: 'current', + script: 'node_modules/.bin/rw', + args: 'serve api', + instances: 'max', + exec_mode: 'cluster', + wait_ready: true, + listen_timeout: 10000, + }, + { + name: 'web', + cwd: 'current', + script: 'node_modules/.bin/rw', + args: 'serve web', + instances: 'max', + exec_mode: 'cluster', + wait_ready: true, + listen_timeout: 10000, + }, + ], +} +``` + +Note the inclusion of `migrate = false` so that migrations are not run again on the web server (they only need to run once and it makes sense to keep them with the api side). + +You can add as many `[[servers]]` blocks as you need. 
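+
+If you do split the sides across machines like this, it can help to sanity-check each box directly after a deploy. A minimal sketch, assuming the placeholder hostnames from the example config above, the default ports (8910 for the web process, 8911 for the api process), and that those ports are reachable from wherever you run the check:
+
+```bash
+# Confirm the web process is answering on the web server
+curl -I http://web.server.com:8910
+
+# Confirm the api process is answering GraphQL on the api server
+# (`yarn rw serve api` serves functions at the root, so graphql lives at /graphql)
+curl "http://api.server.com:8911/graphql?query={redwood{version}}"
+```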
+ +#### Multiple Environments + +You can deploy to multiple environments from a single `deploy.toml` by including servers grouped by environment name: + +```toml title="deploy.toml" +[[production.servers]] +host = "prod.server.com" +username = "user" +agentForward = true +sides = ["api", "web"] +path = "/var/www/app" +processNames = ["serve"] + +[[staging.servers]] +host = "staging.server.com" +username = "user" +agentForward = true +sides = ["api", "web"] +path = "/var/www/app" +processNames = ["serve", "stage-logging"] +``` + +At deploy time, include the environment in the command: + +```bash +yarn rw deploy baremetal staging +``` + +Note that the codebase shares a single `ecosystem.config.js` file. If you need a different set of services running in different environments you'll need to simply give them a unique name and reference them in the `processNames` option of `deploy.toml` (see the additional `stage-logging` process in the above example). + +## Server Setup + +You will need to create the directory in which your app code will live. This path will be the `path` var in `deploy.toml`. Make sure the username you will connect as in `deploy.toml` has permission to read/write/execute files in this directory. For example, if your `/var` dir is owned by `root`, but you're going to deploy with a user named `deploy`: + +```bash +sudo mkdir -p /var/www/myapp +sudo chown deploy:deploy /var/www/myapp +``` + +You'll want to create an `.env` file in this directory containing any environment variables that are needed by your app (like `DATABASE_URL` at a minimum). This will be symlinked to each release directory so that it's available as the app expects (in the root directory of the codebase). + +:::warning SSH and Non-interactive Sessions + +The deployment process uses a '[non-interactive](https://tldp.org/LDP/abs/html/intandnonint.html)' SSH session to run commands on the remote server. A non-interactive session will often load a minimal amount of settings for better compatibility and speed. In some versions of Linux `.bashrc` by default does not load (by design) from a non-interactive session. This can lead to `yarn` (or other commands) not being found by the deployment script, even though they are in your path, because additional ENV vars are set in `~/.bashrc` which provide things like NPM paths and setup. + +A quick fix on some distros is to edit the deployment user's `~/.bashrc` file and comment out the lines that *stop* non-interactive processing. + +```diff title="~/.bashrc" +# If not running interactively, don't do anything +- case $- in +- *i*) ;; +- *) return;; +- esac + +# If not running interactively, don't do anything ++ # case $- in ++ # *i*) ;; ++ # *) return;; ++ # esac +``` + +This may also be a one-liner like: + +```diff title="~/.bashrc" +- [ -z "$PS1" ] && return ++ # [ -z "$PS1" ] && return +``` + +There are techniques for getting `node`, `npm` and `yarn` to be available without loading everything in `.bashrc`. See [this comment](https://github.com/nvm-sh/nvm/issues/1290#issuecomment-427557733) for some ideas. + +::: + +## First Deploy + +Back on your development machine, enter your details in `deploy.toml`, commit it and push it up, and then try a first deploy: + +```bash +yarn rw deploy baremetal production --first-run +``` + +If there are any issues the deploy should stop and you'll see the error message printed to the console. + +If it worked, hooray! You're deployed to BAREMETAL. If not, read on... 
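+
+Before digging into the troubleshooting steps below, it can be worth a quick check that your PM2 processes came up at all. A minimal sketch, assuming the example `user` and `server.com` values from `deploy.toml`:
+
+```bash
+# List the PM2 processes on the server; "serve" (or whatever you listed in
+# processNames) should show a status of "online"
+ssh user@server.com "pm2 ls"
+```
+
+Keep in mind that, as noted in the Server Setup section above, a non-interactive SSH session may not load everything in `~/.bashrc`, so if `pm2` isn't found this way, SSH in normally and run `pm2 ls` from an interactive shell instead.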
+ +### Troubleshooting + +On the server you should see a new directory inside the `path` you defined in `deploy.toml`. It should be a timestamp of the deploy, like: + +```bash +drwxrwxr-x 7 ubuntu ubuntu 4096 Apr 22 23:00 ./ +drwxr-xr-x 7 ubuntu ubuntu 4096 Apr 22 22:46 ../ +-rw-rw-r-- 1 ubuntu ubuntu 1167 Apr 22 20:49 .env +drwxrwxr-x 10 ubuntu ubuntu 4096 Apr 22 21:43 20220422214218/ +``` + +You may or may not also have a `current` symlink in the app directory pointing to that timestamp directory (it depends how far the deploy script got before it failed as to whether you'll have the symlink or not). + +`cd` into that timestamped directory and check that you have a `.env` symlink pointing back to the app directory's `.env` file. + +Next, try performing all of the steps yourself that would happen during a deploy: + +``` +yarn install +yarn rw prisma migrate deploy +yarn rw prisma generate +yarn rw dataMigrate up +yarn rw build +ln -nsf "$(pwd)" ../current +``` + +If they worked for you, the deploy process should have no problem as it runs the same commands (after all, it connects via SSH and runs the same commands you just did!) + +Next we can check that the site is being served correctly. Run `yarn rw serve` and make sure your processes start and are accessible (by default on port 8910): + +```bash +curl http://localhost:8910 +# or +wget http://localhost:8910 +``` + +If you don't see the content of your `web/src/index.html` file then something isn't working. You'll need to fix those issues before you can deploy. Verify the api side is responding: + +```bash +curl http://localhost:8910/.redwood/functions/graphql?query={redwood{version}} +# or +wget http://localhost:8910/.redwood/functions/graphql?query={redwood{version}} +``` + +You should see something like: + +```json +{ + "data": { + "redwood": { + "version": "1.0.0" + } + } +} +``` + +If so then your API side is up and running! The only thing left to test is that the api side has access to the database. This call would be pretty specific to your app, but assuming you have port 8910 open to the world you could simply open a browser to click around to find a page that makes a database request. + +Was the problem with starting your PM2 process? That will be harder to debug here in this doc, but visit us in the [forums](https://community.redwoodjs.com) or [Discord](https://discord.gg/redwoodjs) and we'll try to help! + +:::note My pm2 processes are running but your app has errors, how do I see them? + +If your processes are up and running in pm2 you can monitor their log output. Run `pm2 monit` and get a nice graphical interface for watching the logs on your processes. Press the up/down arrows to move through the processes and left/right to switch panes. + +![pm2 monit screenshot](https://user-images.githubusercontent.com/300/213776175-2f78d9d4-7e6e-4d69-81b2-a648cc37b6ea.png) + +Sometimes the log messages are too long to read in the pane at the right. In that case you can watch them live by "tailing" them right in the terminal. pm2 logs are written to `~/.pm2/logs` and are named after the process name and id, and whether they are standard output or error messages. 
Here's an example directory listing: + +``` +ubuntu@ip-123-45-67-89:~/.pm2/logs$ ll +total 116 +drwxrwxr-x 2 ubuntu ubuntu 4096 Jan 20 17:58 ./ +drwxrwxr-x 5 ubuntu ubuntu 4096 Jan 20 17:40 ../ +-rw-rw-r-- 1 ubuntu ubuntu 0 Jan 20 17:58 api-error-0.log +-rw-rw-r-- 1 ubuntu ubuntu 0 Jan 20 17:58 api-error-1.log +-rw-rw-r-- 1 ubuntu ubuntu 27788 Jan 20 18:11 api-out-0.log +-rw-rw-r-- 1 ubuntu ubuntu 21884 Jan 20 18:11 api-out-1.log +``` + +To watch a log live, run: + +```terminal +tail -f ~/.pm2/logs/api-out-0.log +``` + +Note that if you have more than one process running, like we do here, requesting a page on the website will send the request to one of available processes randomly, so you may not see your request show up unless you refresh a few times. Or you can connect to two separate SSH sessions and tail both of the log files at the same time. + +::: + +## Starting Processes on Server Restart + +The `pm2` service requires some system "hooks" to be installed so it can boot up using your system's service manager. Otherwise, your PM2 services will need to be manually started again on a server restart. These steps only need to be run the first time you install PM2. + +SSH into your server and then run: + +```bash +pm2 startup +``` + +You will see some output similar to the output below. We care about the output after "copy/paste the following command:" You'll need to do just that: copy the command starting with `sudo` and then paste and execute it. *Note* this command uses `sudo` so you'll need the root password to the machine in order for it to complete successfully. + +:::warning + +The below text is *example* output, yours will be different, don't copy and paste ours! + +::: + +```bash +$ pm2 startup +[PM2] Init System found: systemd +[PM2] To setup the Startup Script, copy/paste the following command: +// highlight-next-line +sudo env PATH=$PATH:/home/ubuntu/.nvm/versions/node/v16.13.2/bin /home/ubuntu/.nvm/versions/node/v16.13.2/lib/node_modules/pm2/bin/pm2 startup systemd -u ubuntu --hp /home/ubuntu +``` + +In this example, you would copy `sudo env PATH=$PATH:/home/ubuntu/.nvm/versions/node/v16.13.2/bin /home/ubuntu/.nvm/versions/node/v16.13.2/lib/node_modules/pm2/bin/pm2 startup systemd -u ubuntu --hp /home/ubuntu` and run it. You should get a bunch of output along with `[PM2] [v] Command successfully executed.` near the end. Now if your server restarts for whatever reason, your PM2 processes will be restarted once the server is back up. + +## Customizing the Deploy + +There are several ways you can customize the deploys steps, whether that's skipping steps completely, or inserting your own commands before or after the default ones. + +### Skipping Steps + +If you want to speed things up you can skip one or more steps during the deploy. For example, if you have no database migrations, you can skip them completely and save some time: + +```bash +yarn rw deploy baremetal production --no-migrate +``` + +Run `yarn rw deploy baremetal --help` for the full list of flags. You can set them as `--migrate=false` or use the `--no-migrate` variant. + +### Inserting Custom Commands + +Baremetal supports running your own custom commands before or after the regular deploy commands. You can run commands **before** and/or **after** the built-in commands. Your custom commands are defined in the `deploy.toml` config file. The existing commands that you can hook into are: + +1. `update` - cloning the codebase +2. `symlinkEnv` - symlink the new deploy's `.env` to shared one in the app dir +3. 
`install` - `yarn install` +4. `migrate` - database migrations +5. `build` - `yarn build` (your custom before/after command is run for each side being built) +6. `symlinkCurrent` - symlink the new deploy dir to `current` in the app dir +7. `restart` - (re)starting any pm2 processes (your custom command will run before/after each process is restarted) +8. `cleanup` - cleaning up any old releases + +You can define your before/after commands in three different places: + +* Globally - runs for any environment +* Environment specific - runs for only a single environment +* Server specific - runs for only a single server in a single environment + +:::warning + +Custom commands are run in the new **deploy** directory, not the root of your application directory. During a deploy the `current` symlink will point to the previous directory while your code is executed in the new one, before the `current` symlink location is updated. + +```bash +drwxrwxr-x 5 ubuntu ubuntu 4096 May 10 18:20 ./ +drwxr-xr-x 7 ubuntu ubuntu 4096 Apr 27 17:43 ../ +drwxrwxr-x 2 ubuntu ubuntu 4096 May 9 22:59 20220503211428/ +drwxrwxr-x 2 ubuntu ubuntu 4096 May 9 22:59 20220503211429/ +drwxrwxr-x 10 ubuntu ubuntu 4096 May 10 18:18 20220510181730/ <-- commands are run in here +lrwxrwxrwx 1 ubuntu ubuntu 14 May 10 18:19 current -> 20220503211429/ +-rw-rw-r-- 1 ubuntu ubuntu 1167 Apr 22 20:49 .env +``` + +::: + +#### Syntax + +Global events are defined in a `[before]` and/or `[after]` block in your `deploy.toml` file: + +```toml +[before] +install = "touch install.lock" + +[after] +install = "rm install.lock" + +[[production.servers]] +host = 'server.com' +# ... +``` + +Environment specific commands are defined in a `[[environment.before]]` and `[[environment.after]]` block: + +```toml +[production.before] +install = "touch prod-install.lock" + +[production.after] +install = "rm prod-install.lock" + +[production.servers] +host = 'server.com' +# ... +``` + +Server specific commands are defined with a `before.command` and `after.command` key directly in your server config: + +```toml +[[production.servers]] +host = 'server.com' +# ... +before.install = 'touch server-install.lock' +after.install = 'rm server-install.lock' +``` + +You can define commands as a string, or an array of strings if you want to run multiple commands: + +```toml +[before] +install = ["echo 'started at $(date)' > install.lock", "cp -R . ../backup"] + +[[production.servers]] +host = 'server.com' +# ... +``` + +You can include commands in any/all of the three configurations (global, env and server) and they will all be stacked up and run in that order: `global -> environment -> server`. For example: + + +```toml +[[production.servers]] +host = 'server.com' +# ... +before.install = 'touch server-install.lock' + +[production.before] +install = ['touch prod-install1.lock', 'touch prod-install2.lock'] + +[before] +install = 'touch install.lock' +``` + +Would result in the commands running in this order, all before running `yarn install`: + +1. `touch install.lock` +2. `touch prod-install1.lock` +3. `touch prod-install2.lock` +4. 
`touch server-install.lock` + +## Rollback + +If you deploy and find something has gone horribly wrong, you can rollback your deploy to the previous release: + +```bash +yarn rw deploy baremetal production --rollback +``` + +You can even rollback multiple deploys, up to the total number you still have denoted with the `keepReleases` option: + +```bash +yarn rw deploy baremetal production --rollback 3 +``` + +Note that this will *not* rollback your database—if you had a release that changed the database, that updated database will still be in effect, but with the previous version of the web and api sides. Trying to undo database migrations is a very difficult proposition and isn't even possible in many cases. + +Make sure to thoroughly test releases that change the database before doing it for real! + +## Maintenance Page + +If you find that you have a particular complex deploy, one that may involve incompatible database changes with the current codebase, or want to make sure that database changes don't occur while in the middle of a deploy, you can put up a maintenance page: + +```bash +yarn rw deploy baremetal production --maintenance up +``` + +It does this by replacing `web/dist/200.html` with `web/src/maintenance.html`. This means any new web requests, at any URL, will show the maintenance page. This process also stops any services listed in the `processNames` option of `deploy.toml`—this is important for the api server as it will otherwise keep serving requests to users currently running the app, even though no *new* users can get the Javascript packages required to start a new session in their browser. + +You can remove the maintenance page with: + +```bash +yarn rw deploy baremetal production --maintenance down +``` + +Note that the maintenance page will automatically come down as the result of a new deploy as it checks out a new copy of the codebase (with a brand new copy of `web/dist/200.html` and will automatically restart services (bring them all back online). + +## Monitoring + +PM2 has a nice terminal-based dashboard for monitoring your services: + +```bash +pm2 monit +``` + +![pm2 dashboard](https://user-images.githubusercontent.com/300/164799386-84442fa3-8e68-4cc6-9e64-928b8e32731a.png) + +And even a web-based UI with paid upgrades if you need to give normies access to your monitoring data: + +![pm2 web dashboard](https://user-images.githubusercontent.com/300/164799541-6fe321fa-4d7c-44f7-93c6-3c202638da4f.png) + +## Example Server Configurations + +The default configuration, which requires the least amount of manual configuration, is to serve both the web and api sides, with the web side being bound to port 8910. This isn't really feasible for a general web app which should be available on port 80 (for HTTP) and/or port 443 (for HTTPS). Here are some custom configs to help. + +### Redwood Serves Web and Api Sides, Bind to Port 80 + +This is almost as easy as the default configuration, you just need to tell Redwood to bind to port 80. However, most *nix distributions will not allow a process to bind to ports lower than 1024 without root/sudo permissions. There is a command you can run to allow access to a specific binary (`node` in this case) to bind to one of those ports anyway. 
+ +#### Tell Redwood to Bind to Port 80 + +Update the `[web]` port: + +```diff title="redwood.toml" +[web] + title = "My Application" + apiUrl = "/.netlify/functions" ++ port = 80 +[api] + port = 8911 +[browser] + open = true +``` + +#### Allow Node to Bind to Port 80 + +Use the [setcap](https://man7.org/linux/man-pages/man7/capabilities.7.html) utility to provide access to lower ports by a given process: + +```bash +sudo setcap CAP_NET_BIND_SERVICE=+eip $(which node) +``` + +Now restart your service and it should be available on port 80: + +```bash +pm2 restart serve +``` + +This should get your site available on port 80 (for HTTP), but you really want it available on port 443 (for HTTPS). That won't be easy if you continue to use Redwood's internal web server. See the next recipe for a solution. + +### Redwood Serves Api, Nginx Serves Web Side + +[nginx](https://www.nginx.com/) is a very robust, dedicated web server that can do a better job of serving our static web-side files than Redwood's own built-in web server (Fastify) which isn't really configured in Redwood for a high traffic, production website. + +If nginx will be serving our web side, what about api-side? Redwood's internal API server will be running, but on the default port of 8911. But browsers are going to want to connect on port 80 (HTTP) or 443 (HTTPS). nginx takes care of this as well: it will [proxy](https://docs.nginx.com/nginx/admin-guide/web-server/reverse-proxy/) (forward) any requests to a path of your choosing (like the default of `/.redwood/functions`) to port 8911 behind the scenes, then return the response to the browser. + +This doc isn't going to go through installing and getting nginx running, there are plenty of resources for that available. What we will show is a successful nginx configuration file used by several Redwood apps currently in production. 
+ +```text title="nginx.conf" +upstream redwood_server { + server 127.0.0.1:8911 fail_timeout=0; +} + +server { + root /var/www/myapp/current/web/dist; + server_name myapp.com; + index index.html; + + gzip on; + gzip_min_length 1000; + gzip_types application/json text/css application/javascript application/x-javascript; + + sendfile on; + + keepalive_timeout 65; + + error_page 404 /404.html; + error_page 500 /500.html; + + location / { + try_files $uri /200.html =404; + } + + location ^~ /static/ { + gzip_static on; + expires max; + add_header Cache-Control public; + } + + location ~ /.redwood/functions(.*) { + rewrite ^/.redwood/functions(.*) $1 break; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_pass http://redwood_server; + } +} +``` + +Now when you start Redwood, you're only going to start the api server: + +``` +yarn rw serve api +``` + +When using `pm2` to start/monitor your processes, you can simplify your `deploy.toml` and `ecosystem.config.js` files to only worry about the api side: + +```toml title="deploy.toml" +[[production.servers]] +host = "myserver.com" +username = "ubuntu" +agentForward = true +sides = ["api", "web"] +path = "/var/www/myapp" +// highlight-next-line +processNames = ["api"] +repo = "git@github.com:redwoodjs/myapp.git" +branch = "main" +keepReleases = 3 +packageManagerCommand = "yarn" +monitorCommand = "pm2" +``` + +```js title="ecosystem.config.js" +module.exports = { + apps: [ + { + name: 'api', + cwd: 'current', + script: 'node_modules/.bin/rw', + args: 'serve api', + instances: 'max', + exec_mode: 'cluster', + wait_ready: true, + listen_timeout: 10000, + } + ] +} +``` + +This is the bare minimum to get your site served over HTTP, insecurely. After verifying that your site is up and running, we recommend using [Let's Encrypt](https://www.digitalocean.com/community/tutorials/how-to-secure-nginx-with-let-s-encrypt-on-ubuntu-20-04) to provision a SSL cert and it will also automatically update your nginx config so everything is served over HTTPS. + +#### Custom API Path + +If you don't love the path of `/.redwood/functions` for your API calls, this is easy to change. You'll need to tell Redwood to use a different path in development, and then let nginx know about that same path so that it resolves the same in production. 
+ +For example, to simplify the path to just `/api` you'll need to make a change to `redwood.toml` and your new nginx config file: + +```toml title="redwood.toml" +[web] + title = "My App" + port = 8910 + host = '0.0.0.0' +// highlight-next-line + apiUrl = "/api" +[api] + port = 8911 +[browser] + open = true +``` + +```text title="nginx.conf" +upstream redwood_server { + server 127.0.0.1:8911 fail_timeout=0; +} + +server { + root /var/www/myapp/current/web/dist; + server_name myapp.com; + index index.html; + + gzip on; + gzip_min_length 1000; + gzip_types application/json text/css application/javascript application/x-javascript; + + sendfile on; + + keepalive_timeout 65; + + error_page 404 /404.html; + error_page 500 /500.html; + + location / { + try_files $uri /200.html =404; + } + + location ^~ /static/ { + gzip_static on; + expires max; + add_header Cache-Control public; + } + +// highlight-next-line + location ~ /api(.*) { +// highlight-next-line + rewrite ^/api(.*) $1 break; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_pass http://redwood_server; + } +} +``` diff --git a/docs/versioned_docs/version-7.0/deploy/coherence.md b/docs/versioned_docs/version-7.0/deploy/coherence.md new file mode 100644 index 000000000000..a2b9ec845d2a --- /dev/null +++ b/docs/versioned_docs/version-7.0/deploy/coherence.md @@ -0,0 +1,40 @@ +--- +description: Serverful deploys on GCP or AWS via Coherence's full-lifecycle environment automation +--- + +# Deploy to Coherence + +[Coherence](https://www.withcoherence.com/) delivers automated environments across the full software development lifecycle, without requiring you to glue together your own mess of open source tools to get a world-class develper experience for your team. Coherence is focused on serving startups, who are doing mission-critical work. With one simple configuration, Coherence offers: + +- Cloud-hosted development environments, based on VSCode. Similar to Gitpod or GitHub CodeSpaces +- Production-ready CI/CD running in your own GCP/AWS account, including: database migration/seeding/snapshot loading, parallelized tests, container building and docker registry management +- Full-stack branch previews. Vercel/Netlify-like developer experience for arbitrary container apps, including dependencies such as CDN, redis, and database resources +- Staging and production environment management in your AWS/GCP accounts. Production runs in its own cloud account (AWS) or project (GCP). Integrated secrets management across all environment types with a developer-friendly UI + +## Coherence Prerequisites + +To deploy to Coherence, your Redwood project needs to be hosted on GitHub and you must have an [AWS](https://docs.withcoherence.com/docs/overview/aws-deep-dive) or [GCP](https://docs.withcoherence.com/docs/overview/gcp-deep-dive) account. + +## Coherence Deploy + +:::warning Prerender doesn't work with Coherence yet + +You can see its current status and follow updates here on GitHub: https://github.com/redwoodjs/redwood/issues/8333. + +But if you don't use prerender, carry on! + +::: + +If you want to deploy your Redwood project on Coherence, run the setup command: + +``` +yarn rw setup deploy coherence +``` + +The command will inspect your Prisma config to determine if you're using a supported database (at the moment, only `postgres` or `mysql` are supported on Coherence). 
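+
+If you're not sure which provider your project is configured with, you can check before running the setup command. A quick sketch, assuming the default schema location:
+
+```bash
+# Print the datasource block from your Prisma schema; the provider line
+# should read "postgresql" or "mysql" to deploy on Coherence
+grep -A 2 "datasource db" api/db/schema.prisma
+```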
+ +Then follow the [Coherence Redwood deploy docs](https://docs.withcoherence.com/docs/configuration/frameworks#redwood-js) for more information, including if you want to set up: +- a redis server +- database migration/seeding/snapshot loading +- cron jobs or async workers +- object storage using Google Cloud Storage or AWS's S3 diff --git a/docs/versioned_docs/version-7.0/deploy/edgio.md b/docs/versioned_docs/version-7.0/deploy/edgio.md new file mode 100644 index 000000000000..c821c3ee2333 --- /dev/null +++ b/docs/versioned_docs/version-7.0/deploy/edgio.md @@ -0,0 +1,16 @@ +# Deploy to Edgio + +[Edgio](https://edg.io) extends the capabilities of a traditional CDN by not only hosting your static content, but also providing server-side rendering for progressive web applications as well as caching both your APIs and HTML at the network edge to provide your users with the fastest browsing experience. + +## Edgio Deploy Setup + +In order to deploy your RedwoodJS project to Edgio, the project must first be initialized with the Edgio CLI. + +1. In your project, run the command `yarn rw setup deploy edgio`. +2. Verify the changes to your project, commit and push to your repository. +3. Deploy your project to Edgio + 1. If this is your first time deploying to Edgio, the interactive CLI will prompt to authenticate using your browser. You can start the deploy by running `yarn rw deploy edgio`. + 2. If you are deploying from a **non-interactive** environment, you will need to create an account on [Edgio Developer Console](https://app.layer0.co) first and setup a [deploy token](https://docs.edg.io/guides/deploy_apps#deploy-from-ci). Once the deploy token is created, save it as a secret to your environment. You can start the deploy by running `yarn rw deploy edgio --token=XXX`. +4. Follow the link in the output to view your site live once deployment has completed! + +For more information on deploying to Edgio, check out the [documentation](https://docs.edg.io). diff --git a/docs/versioned_docs/version-7.0/deploy/flightcontrol.md b/docs/versioned_docs/version-7.0/deploy/flightcontrol.md new file mode 100644 index 000000000000..192efd1776b6 --- /dev/null +++ b/docs/versioned_docs/version-7.0/deploy/flightcontrol.md @@ -0,0 +1,24 @@ +--- +description: How to deploy a Redwood app to AWS via Flightcontrol +--- + +# Deploy to AWS with Flightcontrol + +[Flightcontrol](https://www.flightcontrol.dev?ref=redwood) enables any developer to deploy to AWS without being a wizard. It's extremely easy to use but lets you pop the hood and leverage the raw power of AWS when needed. It supports servers, static sites, and databases which makes it a perfect fit for hosting scalable Redwood apps. + +## Flightcontrol Deploy Setup + +1. In your project, run the command `yarn rw setup deploy flightcontrol --database=YOUR_DB_TYPE` where YOUR_DB_TYPE is `mysql` or `postgresql` +2. Commit the changes and push to github +3. If you don't have an account, sign up at [app.flightcontrol.dev/signup](https://app.flightcontrol.dev/signup?ref=redwood) +4. Create a new project from the onboarding screen or project list + 1. Connect your Github account and select your repo + 2. Select "Config Type" as `flightcontrol.json` + 3. Click "Create Project" and complete any required steps like linking your AWS account. +5. 
If using dbAuth, add the session secret key env variable in the Flightcontrol dashboard + + +NOTE: If you are using yarn v1, remove the `installCommand`'s from flightcontrol.json + +If you have *any* problems or questions, Flightcontrol is very responsive in [their support Discord](https://discord.gg/yY8rSPrD6q). + diff --git a/docs/versioned_docs/version-7.0/deploy/introduction.md b/docs/versioned_docs/version-7.0/deploy/introduction.md new file mode 100644 index 000000000000..1cbb38e0fcd7 --- /dev/null +++ b/docs/versioned_docs/version-7.0/deploy/introduction.md @@ -0,0 +1,104 @@ +--- +description: Deploy to serverless or serverful providers +--- + +# Introduction to Deployment + +Redwood is designed for both serverless and traditional infrastructure deployments, offering a unique continuous deployment process in both cases: + +1. code is committed to a repository on GitHub, GitLab, or Bitbucket, which triggers the deployment +2. the Redwood API Side and Web Side are individually prepared via a build process +3. during the build process, any database related actions are run (e.g. migrations) +4. the hosting provider deploys the built Web static assets to a CDN and the API code to a serverless backend (e.g. AWS Lambdas) + +Currently, these are the officially supported deploy targets: +- Baremetal (physical server that you have SSH access to) +- [Coherence](https://www.withcoherence.com/) +- [Flightcontrol.dev](https://www.flightcontrol.dev?ref=redwood) +- [Edg.io](https://edg.io) +- [Netlify.com](https://www.netlify.com/) +- [Render.com](https://render.com) +- [Serverless.com](https://serverless.com) +- [Vercel.com](https://vercel.com) + +Redwood has a CLI generator that adds the code and configuration required by the specified provider (see the [CLI Doc](cli-commands.md#deploy-config) for more information): +```shell +yarn rw setup deploy <provider> +``` + +There are examples of deploying Redwood on other providers such as Google Cloud and direct to AWS. You can find more information by searching the [GitHub Issues](https://github.com/redwoodjs/redwood/issues) and [Forums](https://community.redwoodjs.com). + + +## General Deployment Setup + +Deploying Redwood requires setup for the following four categories. + +### 1. Host Specific Configuration + +Each hosting provider has different requirements for how (and where) the deployment is configured. Sometimes you'll need to add code to your repository, configure settings in a dashboard, or both. You'll need to read the provider specific documentation. + +The most important Redwood configuration is to set the `apiUrl` in your `redwood.toml` This sets the API path for your serverless functions specific to your hosting provider. + +### 2. Build Command + +The build command is used to prepare the Web and API for deployment. Additionally, other actions can be run during build such as database migrations. The Redwood build command must specify one of the supported hosting providers (aka `target`): + +```shell +yarn rw deploy <target> +``` + +For example: + +```shell +# Build command for Netlify deploy target +yarn rw deploy netlify +``` + +```shell +# Build command for Vercel deploy target +yarn rw deploy vercel +``` + +```shell +# Build command for AWS Lambdas using the https://serverless.com framework +yarn rw deploy serverless --side api +``` + +```shell +# Build command for Edgio deploy target +yarn rw deploy edgio +``` + +```shell +# Build command for baremetal deploy target +yarn rw deploy baremetal [--first-run] +``` + +### 3. 
Prisma and Database + +Redwood uses Prisma for managing database access and migrations. The settings in `api/prisma/schema.prisma` must include the correct deployment database, e.g. postgresql, and the database connection string. + +To use PostgreSQL in production, include this in your `schema.prisma`: + +```jsx +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} +``` + +The `url` setting above accesses the database connection string via an environment variable, `DATABASE_URL`. Using env vars is the recommended method for both ease of development process as well as security best practices. + +Whenever you make changes to your `schema.prisma`, you must run the following command: + +```shell +yarn rw prisma migrate dev # creates and applies a new Prisma DB migration +``` + +> Note: when setting your production DATABASE_URL env var, be sure to also set any connection-pooling or sslmode parameters. For example, if using Supabase Postgres with pooling, then you would use a connection string similar to `postgresql://postgres:mydb.supabase.co:6432/postgres?sslmode=require&pgbouncer=true` that uses a specific 6432 port, informs Prisma to consider pgBouncer, and also to use SSL. See: [Connection Pooling](connection-pooling.md) for more info. + +### 4. Environment Variables + +Any environment variables used locally, e.g. in your `env.defaults` or `.env`, must also be added to your hosting provider settings. (See documentation specific to your provider.) + +Additionally, if your application uses env vars on the Web Side, you must configure Redwood's build process to make them available in production. See the [Redwood Environment Variables doc](environment-variables.md) for instructions. diff --git a/docs/versioned_docs/version-7.0/deploy/netlify.md b/docs/versioned_docs/version-7.0/deploy/netlify.md new file mode 100644 index 000000000000..ad62b9b5d8f5 --- /dev/null +++ b/docs/versioned_docs/version-7.0/deploy/netlify.md @@ -0,0 +1,27 @@ +--- +description: The serverless git deploy you know and love +--- + +# Deploy to Netlify + +## Netlify tl;dr Deploy + +If you simply want to experience the Netlify deployment process without a database and/or adding custom code, you can do the following: + +1. create a new redwood project: `yarn create redwood-app ./netlify-deploy` +2. after your "netlify-deploy" project installation is complete, init git, commit, and add it as a new repo to GitHub, BitBucket, or GitLab +3. run the command `yarn rw setup deploy netlify` and commit and push changes +4. use the Netlify [Quick Start](https://app.netlify.com/signup) to deploy + +:::warning +While you may be tempted to use the [Netlify CLI](https://cli.netlify.com) commands to [build](https://cli.netlify.com/commands/build) and [deploy](https://cli.netlify.com/commands/deploy) your project directly from you local project directory, doing so **will lead to errors when deploying and/or when running functions**. I.e. errors in the function needed for the GraphQL server, but also other serverless functions. + +The main reason for this is that these Netlify CLI commands simply build and deploy -- they build your project locally and then push the dist folder. That means that when building a RedwoodJS project, the [Prisma client is generated with binaries matching the operating system at build time](https://cli.netlify.com/commands/link) -- and not the [OS compatible](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#binarytargets-options) with running functions on Netlify. 
Your Prisma client engine may be `darwin` for OSX or `windows` for Windows, but it needs to be `debian-openssl-1.1.x` or `rhel-openssl-1.1.x`. If the client is incompatible, your functions will fail.
+
+
+Therefore, **please follow the [Tutorial Deployment section](tutorial/chapter4/deployment.md)** to sync your GitHub (or other compatible source control service) repository with Netlify and allow their build and deploy system to manage deployments.
+:::
+
+## Netlify Complete Deploy Walkthrough
+
+For the complete deployment process on Netlify, see the [Tutorial Deployment section](tutorial/chapter4/deployment.md).
diff --git a/docs/versioned_docs/version-7.0/deploy/render.md b/docs/versioned_docs/version-7.0/deploy/render.md
new file mode 100644
index 000000000000..705ec3dec80f
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/deploy/render.md
@@ -0,0 +1,15 @@
+---
+description: Serverful deploys via Render's unified cloud
+---
+
+# Deploy to Render
+
+Render is a unified cloud to build and run all your apps and websites with free SSL, a global CDN, private networks and auto-deploys from Git — **database included**!
+
+## Render tl;dr Deploy
+
+If you simply want to experience the Render deployment process, including a Postgres or SQLite database, you can do the following:
+1. create a new redwood project: `yarn create redwood-app ./render-deploy`
+2. after your "render-deploy" project installation is complete, init git, commit, and add it as a new repo to GitHub or GitLab
+3. run the command `yarn rw setup deploy render`, using the `--database` flag to select from `postgresql`, `sqlite` or `none` to proceed without a database [default: `postgresql`]
+4. follow the [Render Redwood Deploy Docs](https://render.com/docs/deploy-redwood) for detailed instructions
diff --git a/docs/versioned_docs/version-7.0/deploy/serverless.md b/docs/versioned_docs/version-7.0/deploy/serverless.md
new file mode 100644
index 000000000000..d854ff791f7c
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/deploy/serverless.md
@@ -0,0 +1,131 @@
+---
+description: Deploy to AWS with Serverless Framework
+---
+
+# Deploy to AWS with Serverless Framework
+
+>⚠️ **Deprecated**
+>As of Redwood v5, we are deprecating this deploy setup as an "officially" supported provider. This means:
+>- For projects already using this deploy provider, there will be NO change at this time
+>- Both the associated `setup` and `deploy` commands will remain in the framework as is; when setup is run, there will be a “deprecation” message
+>- We will no longer run CI/CD on the Serverless-AWS deployments, which means we are no longer guaranteeing this deploy works with each new version
+>- We are exploring better options to deploy directly to AWS Lambdas; the current deploy commands will not be removed until we find a replacement
+>
+>For more details (e.g. why?) and current status, see the Forum post ["Deprecating support for Serverless Framework Deployments to AWS Lambdas"](https://community.redwoodjs.com/t/deprecating-support-for-serverless-framework-deployments-to-aws-lambdas/4755/10)
+
+>The following instructions assume you have read the [General Deployment Setup](./introduction.md#general-deployment-setup) section above.
+
+Yes, the name is confusing, but Serverless provides a very interesting option—deploy to your own cloud service account and skip the middleman entirely! By default, Serverless just orchestrates starting up services in your cloud provider of choice and pushing your code up to them.
Any bill you receive is from your hosting provider (although many offer a generous free tier). You can optionally use the [Serverless Dashboard](https://www.serverless.com/dashboard/) to monitor your deploys and setup CI/CD to automatically deploy when pushing to your repo of choice. If you don't setup CI/CD you actually deploy from your development machine (or another designated machine you've setup to do the deployment). + +Currently we default to deploying to AWS. We'd like to add more providers in the future but need help from the community in figuring out what services are equivalent to the ones we're using in AWS (Lambda for the api-side and S3/CloudFront for the web-side). + +We'll handle most of the deployment commands for you, you just need an [AWS account](https://www.serverless.com/framework/docs/providers/aws/guide/credentials#sign-up-for-an-aws-account) and your [access/secret keys](https://www.serverless.com/framework/docs/providers/aws/guide/credentials#create-an-iam-user-and-access-key) before we begin. + +## Setup + +One command will set you up with (almost) everything you need: + +```bash +yarn rw setup deploy serverless +``` + +As you'll see mentioned in the post-install instructions, you'll need to provide your AWS Access and AWS Secret Access keys. Add those to the designated places in your `.env` file: + +```bash +# .env + +AWS_ACCESS_KEY_ID=<your-key-here> +AWS_SECRET_ACCESS_KEY=<your-secret-key-here> +``` + +Make sure you don't check `.env` into your repo! It's set in `.gitignore` by default, so make sure it stays that way. + +## First Deploy + +You'll need to add a special flag to the deploy command for your first deploy: + +```bash +yarn rw deploy serverless --first-run +``` + +The first time you deploy your app we'll first deploy just the API side. Once it's live we can get the URL that it's been deployed to and add that as an environment variable `API_URL` so that web side will know what it is during build-time (it needs to know where to send GraphQL and function requests). + +Half-way through the first deploy you'll be asked if you want to add the API_URL to `.env.production` (which is similar to `.env` but is only used when `NODE_ENV=production`, like when building the web and api sides for deploy). Make sure you say `Y`es at this prompt and then it will continue to deploy the web side. + +Once that command completes you should see a message including the URL of your site—open that URL and hopefully everything works as expected! + +> **Heads up** +> +> If you're getting an error trying to load data from the API side, its possible you're still pointing at your local database. +> +> Remember to add a DATABASE_URL env var to your `.env.production` file that is created, pointing at the database you want to use on your deployed site. Since your stack is on AWS, RDS might be a good option, but you might find it easier/quicker to setup databases on other providers too, such as [Railway](https://railway.app/) or [Supabase](https://supabase.com/) + +## Subsequent Deploys + +From now on you can simply run `yarn rw deploy serverless` when you're ready to deploy (which will also be much faster). + + +:::info +Remember, if you add or generate new serverless functions (or endpoints), you'll need to update the configuration in your serverless.yml in `./api/serverless.yml`. + +By default we only configure the `auth` and `graphql` functions for you. 
+:::
+
+## Environment Variables
+
+For local deployment (meaning you're deploying from your own machine, or another that you're in control of) you can put any ENV vars that are production-only into `.env.production`. They will override any same-named vars in `.env`. Make sure neither of these files is checked into your code repository!
+
+If you're setting up CI/CD and deploying from the Serverless Dashboard, you'll need to copy your required ENV vars up to your app on Serverless and then tell it where to get them from. In `api/serverless.yml` and `web/serverless.yml` look for the `provider > environment` section. You'll need to list any ENV vars here, using the `${param:VAR_NAME}` syntax, which means to get them from the Serverless Dashboard "parameters" (which is what they call environment variables, for some strange reason). There's a short sketch of what this section can look like further down this page, just before the Troubleshooting section.
+
+There are even more places you can get environment variables from; check out Serverless's [Variables documentation](https://www.serverless.com/framework/docs/providers/aws/guide/variables) for more.
+
+## Serverless Dashboard
+
+> **Note:**
+> Serverless Dashboard CI/CD does not support projects structured like Redwood, although they're working on it. For CD, you'll need to use something like GitHub Actions.
+>
+> It can still be worthwhile to integrate your project with Serverless Dashboard — you'll have features like deploy logs and monitoring, analytics, secret management, and AWS account integration. You can also [authenticate into your Serverless account within a CI context](https://www.serverless.com/framework/docs/guides/cicd/running-in-your-own-cicd). Just remember that if you do use the Dashboard to manage secrets, you'll need to use the `${param:VAR_NAME}` syntax.
+
+To integrate your site into the Serverless Dashboard, there are two ways:
+
+1. Run `yarn serverless login` and a browser *should* open asking you to allow permission. However, in our experience, this command will fail nearly 50% of the time complaining about an invalid URL. If it *does* work you can then run `yarn serverless` in both the `api` and `web` directories to link them to an existing app in the Dashboard, or you'll be prompted to create a new one. Future deploys will now be monitored on the Dashboard.
+2. You can manually add the `org` and `app` lines in `api/serverless.yml` and `web/serverless.yml`. You'll see example ones commented out near the top of the file.
+
+## Environments Besides Production
+
+By default we assume you want to deploy to a production environment, but Serverless lets you deploy anywhere. They call these destinations "stages", and in Redwood "production" is the default. Check out their [Managing Staging and Environments blog post](https://www.serverless.com/blog/stages-and-environments) for details.
+
+Once configured, just add the stage to your deploy command:
+
+```bash
+yarn rw deploy serverless --stage qa
+```
+
+## Removing Your Deploy
+
+In addition to creating all of the services necessary for your app to run, Serverless can also remove them (which is great when testing to avoid paying for services you're no longer using).
+
+You'll need to run this command in both the `api` and `web` directories:
+
+```bash
+yarn serverless remove --stage production
+```
+
+Note that `production` is the default stage when you deploy with `yarn rw deploy serverless` - if you have customized this, you have to use the same stage as you deployed with!
+
+This will take several minutes, so grab your favorite beverage and enjoy your new $0 monthly bill!
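+
+As referenced in the Environment Variables section above, here's a minimal, hypothetical sketch of what the `provider > environment` block in `api/serverless.yml` can look like. The variable names are examples only, and your generated file will contain other settings around it:
+
+```yaml
+# api/serverless.yml (sketch: only the relevant keys are shown)
+provider:
+  environment:
+    # Pulled from the Serverless Dashboard "parameters" at deploy time
+    DATABASE_URL: ${param:DATABASE_URL}
+    SESSION_SECRET: ${param:SESSION_SECRET}
+```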
+ +:::tip Pro tip +If you get tired of typing `serverless` each time, you can use the much shorter `sls` alias: `yarn rw deploy sls` +::: + +## Troubleshooting + +If you happen to see the following error when deploying: + +```terminal +Error: +No auth.zip file found in the package path you provided. +``` + +Make sure that the dev server isn't running, then retry your deploy. diff --git a/docs/versioned_docs/version-7.0/deploy/vercel.md b/docs/versioned_docs/version-7.0/deploy/vercel.md new file mode 100644 index 000000000000..5f4e6e33fe04 --- /dev/null +++ b/docs/versioned_docs/version-7.0/deploy/vercel.md @@ -0,0 +1,90 @@ +--- +description: Deploy serverless in an instant with Vercel +--- + +# Deploy to Vercel + +>The following instructions assume you have read the [General Deployment Setup](./introduction.md#general-deployment-setup) section above. + +## Vercel tl;dr Deploy + +If you simply want to experience the Vercel deployment process without a database and/or adding custom code, you can do the following: +1. create a new redwood project: `yarn create redwood-app ./vercel-deploy` +2. after your "vercel-deploy" project installation is complete, init git, commit, and add it as a new repo to GitHub, BitBucket, or GitLab +3. run the command `yarn rw setup deploy vercel` and commit and push changes +4. use the Vercel [Quick Start](https://vercel.com/#get-started) to deploy + +_If you choose this quick deploy experience, the following steps do not apply._ + +## Redwood Project Setup + +If you already have a Redwood project, proceed to the next step. + +Otherwise, we recommend experiencing the full Redwood DX via the [Redwood Tutorial](tutorial/foreword.md). Simply return to these instructions when you reach the "Deployment" section. + +## Redwood Deploy Configuration + +Complete the following two steps. Then save, commit, and push your changes. + +### Step 1. Serverless Functions Path + +Run the following CLI Command: +```shell +yarn rw setup deploy vercel +``` + +This updates your `redwood.toml` file, setting `apiUrl = "/api"`: + +### Step 2. Database Settings + +Follow the steps in the [Prisma and Database](./introduction#3-prisma-and-database) section above. _(Skip this step if your project does not require a database.)_ + +### Vercel Initial Setup and Configuration +Either [login](https://vercel.com/login) to your Vercel account and select "Import Project" or use the Vercel [quick start](https://vercel.com/#get-started). + +Then select the "Continue" button within the "From Git Repository" section: +<img src="https://user-images.githubusercontent.com/2951/90482970-e6f3e700-e0e8-11ea-8b3e-979745b0a226.png" /> + +Next, select the provider where your repo is hosted: GitHub, GitLab, or Bitbucket. You'll be asked to login and then provider the URL of the repository, e.g. for a GitHub repo `https://github.com/your-account/your-project.git`. Select "Continue". + +You'll then need to provide permissions for Vercel to access the repo on your hosting provider. + +### Import and Deploy your Project +Vercel will recognize your repo as a Redwood project and take care of most configuration heavy lifting. You should see the following options and, most importantly, the "Framework Preset" showing RedwoodJS. + +<img src="https://user-images.githubusercontent.com/2951/90486275-9337cc80-e0ed-11ea-9af3-fd9613c1256b.png" /> + +Leave the **Build and Output Settings** at the default settings (unless you know what you're doing and have very specific needs). 
+ +In the "Environment Variables" dropdown, add `DATABASE_URL` and your app's database connection string as the value. (Or skip if not applicable.) + +> When configuring a database, you'll want to append `?connection_limit=1` to the URI. This is [recommended by Prisma](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/deployment#recommended-connection-limit) when working with relational databases in a Serverless context. For production apps, you should setup [connection pooling](https://redwoodjs.com/docs/connection-pooling). + +For example, a postgres connection string should look like `postgres://<user>:<pass>@<url>/<db>?connection_limit=1` + +Finally, click the "Deploy" button. You'll hopefully see a build log without errors (warnings are fine) and end up on a screen that looks like this: + +<img src="https://user-images.githubusercontent.com/2951/90487627-9469f900-e0ef-11ea-9378-9bb85e02a792.png" /> + +Go ahead, click that "Visit" button. You’ve earned it 🎉 + +## Vercel Dashboard Settings + +From the Vercel Dashboard you can access the full settings and information for your Redwood App. The default settings seem to work just fine for most Redwood projects. Do take a look around, but be sure check out the [docs as well](https://vercel.com/docs). + +From now on, each time you push code to your git repo, Vercel will automatically trigger a deploy of the new code. You can also manually redeploy if you select "Deployments", then the specific deployment from the list, and finally the "Redeploy" option from the vertical dots menu next to "Visit". + +## vercel.json configuration + +By default, API requests in Vercel have a timeout limit of 15 seconds. To extend this duration, you can modify the vercel.json file by inserting the code snippet provided below. Please be aware that the ability to increase the timeout limit is exclusive to Pro plan subscribers. Additionally, it is important to note that the timeout can be increased up to a maximum of 300 seconds, which is equivalent to 5 minutes. + +``` +{ + "functions": { + "api/src/functions/graphql.*": { + "maxDuration": 120, + "runtime": "@vercel/redwood@2.0.5" + } + } +} +``` diff --git a/docs/versioned_docs/version-7.0/directives.md b/docs/versioned_docs/version-7.0/directives.md new file mode 100644 index 000000000000..b778dc2ef8ce --- /dev/null +++ b/docs/versioned_docs/version-7.0/directives.md @@ -0,0 +1,698 @@ +--- +description: Customize GraphQL execution +--- + +# Directives + +Redwood Directives are a powerful feature, supercharging your GraphQL-backed Services. + +You can think of directives like "middleware" that let you run reusable code during GraphQL execution to perform tasks like authentication and formatting. + +Redwood uses them to make it a snap to protect your API Services from unauthorized access. + +Here we call those types of directives **Validators**. + +You can also use them to transform the output of your query result to modify string values, format dates, shield sensitive data, and more! +We call those types of directives **Transformers**. + +You'll recognize a directive as being 1) preceded by `@` (e.g. `@myDirective`) and 2) declared alongside a field: + +```tsx +type Bar { + name: String! @myDirective +} +``` + +or a Query or a Mutation: + +```tsx +type Query { + bars: [Bar!]! @myDirective +} + +type Mutation { + createBar(input: CreateBarInput!): Bar! 
@myDirective +} +``` + +You can also define arguments that can be extracted and used when evaluating the directive: + +```tsx +type Bar { + field: String! @myDirective(roles: ["ADMIN"]) +} +``` + +or a Query or Mutation: + +```tsx +type Query { + bars: [Bar!]! @myDirective(roles: ["ADMIN"]) +} +``` + +You can also use directives on relations: + +```tsx +type Baz { + name: String! +} + +type Bar { + name: String! + bazzes: [Baz]! @myDirective +} +``` + +There are many ways to write directives using GraphQL tools and libraries. Believe us, it can get complicated fast. + +But, don't fret: Redwood provides an easy and ergonomic way to generate and write your own directives so that you can focus on the implementation logic and not the GraphQL plumbing. + +## What is a Redwood Directive? + +Redwood directives are purposeful. +They come in two flavors: **Validators** and **Transformers**. + +Whatever flavor of directive you want, all Redwood directives must have the following properties: + +- be in the `api/src/directives/{directiveName}` directory where `directiveName` is the directive directory +- must have a file named `{directiveName}.{js,ts}` (e.g. `maskedEmail.ts`) +- must export a `schema` and implement either a `validate` or `transform` function + +### Understanding the Directive Flow + +Since it helps to know a little about the GraphQL phases—specifically the Execution phase—and how Redwood Directives fit in the data-fetching and authentication flow, let's have a quick look at some diagrams. + +First, we see the built-in `@requireAuth` Validator directive that can allow or deny access to a Service (a.k.a. a resolver) based on Redwood authentication. +In this example, the `post(id: Int!)` query is protected using the `@requireAuth` directive. + +If the request's context has a `currentUser` and the app's `auth.{js|ts}` determines it `isAuthenticated()`, then the execution phase proceeds to get resolved (for example, the `post({ id })` Service is executed and queries the database using Prisma) and returns the data in the resulting response when execution is done. + +![require-auth-directive](https://user-images.githubusercontent.com/1051633/135320891-34dc06fc-b600-4c76-8a35-86bf42c7f179.png) + +In this second example, we add the Transformer directive `@welcome` to the `title` field on `Post` in the SDL. + +The GraphQL Execution phase proceeds the same as the prior example (because the `post` query is still protected and we'll want to fetch the user's name) and then the `title` field is resolved based on the data fetch query in the service. + +Finally after execution is done, then the directive can inspect the `resolvedValue` (here "Welcome to the blog!") and replace the value by inserting the current user's name—"Welcome, Tom, to the blog!" + +![welcome-directive](https://user-images.githubusercontent.com/1051633/135320906-5e2d639d-13a1-4aaf-85bf-98529822d244.png) + +### Validators + +Validators integrate with Redwood's authentication to evaluate whether or not a field, query, or mutation is permitted—that is, if the request context's `currentUser` is authenticated or belongs to one of the permitted roles. + +Validators should throw an Error such as `AuthenticationError` or `ForbiddenError` to deny access and simply return to allow. + +Here the `@isSubscriber` validator directive checks if the currentUser exists (and therefore is authenticated) and whether or not they have the `SUBSCRIBER` role. If they don't, then access is denied by throwing an error. 
+ +```tsx +import { + AuthenticationError, + ForbiddenError, + createValidatorDirective, + ValidatorDirectiveFunc, +} from '@redwoodjs/graphql-server' +import { hasRole } from 'src/lib/auth' + +export const schema = gql` + directive @isSubscriber on FIELD_DEFINITION +` + +const validate: ValidatorDirectiveFunc = ({ context }) => { + if (!context.currentUser) { + throw new AuthenticationError("You don't have permission to do that.") + } + + if (!context.currentUser.roles?.includes('SUBSCRIBER')) { + throw new ForbiddenError("You don't have access to do that.") + } +} + +const isSubscriber = createValidatorDirective(schema, validate) + +export default isSubscriber +``` + +Since validator directives can access arguments (such as `roles`), you can quickly provide RBAC (Role-based Access Control) to fields, queries and mutations. + +```tsx +import gql from 'graphql-tag' + +import { createValidatorDirective } from '@redwoodjs/graphql-server' + +import { requireAuth as applicationRequireAuth } from 'src/lib/auth' +import { logger } from 'src/lib/logger' + +export const schema = gql` + directive @requireAuth(roles: [String]) on FIELD_DEFINITION +` + +const validate = ({ directiveArgs }) => { + const { roles } = directiveArgs + + applicationRequireAuth({ roles }) +} + +const requireAuth = createValidatorDirective(schema, validate) + +export default requireAuth +``` + +All Redwood apps come with two built-in validator directives: `@requireAuth` and `@skipAuth`. +The `@requireAuth` directive takes optional roles. +You may use these to protect against unwanted GraphQL access to your data. +Or explicitly allow public access. + +> **Note:** Validators evaluate prior to resolving the field value, so you cannot modify the value and any return value is ignored. + +### Transformers + +Transformers can access the resolved field value to modify and then replace it in the response. +Transformers apply to both single fields (such as a `User`'s `email`) and collections (such as a set of `Posts` that belong to `User`s) or is the result of a query. As such, Transformers cannot be applied to Mutations. + +In the first case of a single field, the directive would return the modified field value. In the latter case, the directive could iterate each `Post` and modify the `title` in each. In all cases, the directive **must** return the same expected "shape" of the data the SDL expects. + +> **Note:** you can chain directives to first validate and then transform, such as `@requireAuth @maskedEmail`. Or even combine transformations to cascade formatting a value (you could use `@uppercase` together with `@truncate` to uppercase a title and shorten to 10 characters). + +Since transformer directives can access arguments (such as `roles` or `maxLength`) you may fetch those values and use them when applying (or to check if you even should apply) your transformation. + +That means that a transformer directive could consider the `permittedRoles` in: + +```tsx +type user { + email: String! 
@maskedEmail(permittedRoles: ["ADMIN"]) +} +``` + +and if the `currentUser` is an `ADMIN`, then skip the masking transform and simply return the original resolved field value: + +```jsx title="./api/src/directives/maskedEmail.directive.js" +import { createTransformerDirective, TransformerDirectiveFunc } from '@redwoodjs/graphql-server' + +export const schema = gql` + directive @maskedEmail(permittedRoles: [String]) on FIELD_DEFINITION +` + +const transform: TransformerDirectiveFunc = ({ context, resolvedValue }) => { + return resolvedValue.replace(/[a-zA-Z0-9]/i, '*') +} + +const maskedEmail = createTransformerDirective(schema, transform) + +export default maskedEmail +``` + +and you would use it in your SDLs like this: + +```graphql +type UserExample { + id: Int! + email: String! @maskedEmail # 👈 will replace alphanumeric characters with asterisks in the response! + name: String +} +``` + +### Where can I use a Redwood Directive? + +A directive can only appear in certain locations in a GraphQL schema or operation. These locations are listed in the directive's definition. + +In the example below, the `@maskedEmail` example, the directive can only appear in the `FIELD_DEFINITION` location. + +An example of a `FIELD_DEFINITION` location is a field that exists on a `Type`: + +```graphql +type UserExample { + id: Int! + email: String! @requireAuth + name: String @maskedEmail # 👈 will maskedEmail name in the response! +} + +type Query { + userExamples: [UserExample!]! @requireAuth 👈 will enforce auth when fetching all users + userExamples(id: Int!): UserExample @requireAuth 👈 will enforce auth when fetching a single user +} +``` + +> **Note**: Even though GraphQL supports `FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE` locations, RedwoodDirectives can **only** be declared on a `FIELD_DEFINITION` — that is, you **cannot** declare a directive in an `Input type`: +> +> ```graphql +> input UserExampleInput { +> email: String! @maskedEmail # 👈 🙅 not allowed on an input +> name: String! @requireAuth # 👈 🙅 also not allowed on an input +> } +> ``` + +## When Should I Use a Redwood Directive? + +As noted in the [GraphQL spec](https://graphql.org/learn/queries/#directives): + +> Directives can be useful to get out of situations where you otherwise would need to do string manipulation to add and remove fields in your query. Server implementations may also add experimental features by defining completely new directives. + +Here's a helpful guide for deciding when you should use one of Redwood's Validator or Transformer directives: + +| | Use | Directive | Custom? | Type | +| --- | ---------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------- | ------------ | +| ✅ | Check if the request is authenticated? | `@requireAuth` | Built-in | Validator | +| ✅ | Check if the user belongs to a role? 
| `@requireAuth(roles: ["AUTHOR"])` | Built-in | Validator | +| ✅ | Only allow admins to see emails, but others get a masked value like "###@######.###" | `@maskedEmail(roles: ["ADMIN"])` | Custom | Transformer | +| 🙅 | Know if the logged in user can edit the record, and/or values | N/A - Instead do this check in your service | +| 🙅 | Is my input a valid email address format? | N/A - Instead do this check in your service using [Service Validations](services.md#service-validations) or consider [GraphQL Scalars](https://www.graphql-scalars.dev) | +| 🙅 | I want to remove a field from the response for data filtering; for example, do not include the title of the post | `@skip(if: true )` or `@include(if: false)` | Instead use [core directives](https://graphql.org/learn/queries/#directives) on the GraphQL client query, not the SDL | Core GraphQL | + +## Combining, Chaining and Cascading Directives + +Now that you've seen what Validator and Transformer directives look like and where and when you may use them, you may wonder: can I use them together? Can I transform the result of a transformer? + +The answer is: yes—yes you can! + +### Combine Directives on a Query and a Type Field + +Let's say you want to only allow logged-in users to be able to query `User` details and you only want un-redacted email addresses to be shown to ADMINs. + +You can apply the `@requireAuth` directive to the `user(id: Int!)` query so you have to be logged in. +Then, you can compose a `@maskedEmail` directive that checks the logged-in user's role membership and if they're not an ADMIN, mask the email address: + +```tsx + type User { + id: Int! + name: String! + email: String! @maskedEmail(role: "ADMIN") + createdAt: DateTime! + } + + type Query { + user(id: Int!): User @requireAuth + } +``` + +Or, let's say I want to only allow logged in users to be able to query User details. + +But, I only want ADMIN users to be able to query and fetch the email address. + +I can apply the `@requireAuth` directive to the `user(id: Int!)` query so I have to be logged in. + +And, I can apply the `@requireAuth` directive to the `email` field with a role argument. + +```tsx + type User { + id: Int! + name: String! + email: String! @requireAuth(role: "ADMIN") + createdAt: DateTime! + } + + type Query { + user(id: Int!): User @requireAuth + } +``` + +Now, if a user who is not an ADMIN queries: + +```tsx +query user(id: 1) { + id + name + createdAt +} +``` + +They will get a result. + +But, if they try to query: + +```tsx +query user(id: 1) { + id + name + email + createdAt +} +``` + +They will be forbidden from even making the request. + +### Chaining a Validator and a Transformer + +Similar to the prior example, you may want to chain directives, but the transform doesn't consider authentication or role membership. + +For example, here we ensure that anyone trying to query a User and fetch the email must be authenticated. + +And then, if they are, apply a mask to the email field. + +```tsx + type User { + id: Int! + name: String! + email: String! @requireAuth @maskedEmail + createdAt: DateTime! + } +``` + +### Cascade Transformers + +Maybe you want to apply multiple field formatting? + +If your request event headers includes geographic or timezone info, you could compose a custom Transformer directive called `@localTimezone` could inspect the header value and convert the `createdAt` from UTC to local time -- something often done in the browser. 
+
+Then, you can chain the `@dateFormat` Transformer to return just the date portion of the timestamp -- and not the time.
+
+```tsx
+ type User {
+   id: Int!
+   name: String!
+   email: String!
+   createdAt: DateTime! @localTimezone @dateFormat
+ }
+```
+
+> **Note**: These directives could alternatively be implemented as "operation directives" so the client can use them in a query instead of at the schema level. Such operation directives are a potential future Redwood directive feature.
+
+## GraphQL Handler Setup
+
+Redwood makes it easy to code, organize, and map your directives into your GraphQL schema.
+Simply add them to the `directives` directory and the `createGraphQLHandler` does all the work.
+
+> **Note**: Redwood has a generator that will do all the heavy lifting setup for you!
+
+```tsx title="api/src/functions/graphql.ts"
+import { createGraphQLHandler } from '@redwoodjs/graphql-server'
+
+import directives from 'src/directives/**/*.{js,ts}' // 👈 directives live here
+import sdls from 'src/graphql/**/*.sdl.{js,ts}'
+import services from 'src/services/**/*.{js,ts}'
+
+import { db } from 'src/lib/db'
+import { logger } from 'src/lib/logger'
+
+export const handler = createGraphQLHandler({
+  loggerConfig: { logger, options: {} },
+  directives, // 👈 directives are added to the schema here
+  sdls,
+  services,
+  onException: () => {
+    // Disconnect from your database on an unhandled exception.
+    db.$disconnect()
+  },
+})
+```
+
+## Secure by Default with Built-in Directives
+
+By default, your GraphQL endpoint is open to the world.
+
+That means anyone can request any query and invoke any Mutation.
+Any types and fields defined in your SDL are data that anyone can access.
+
+But Redwood encourages being secure by default by defaulting all queries and mutations to have the `@requireAuth` directive when generating SDL or a service.
+When your app builds and your server starts up, Redwood checks that **all** queries and mutations have `@requireAuth`, `@skipAuth` or a custom directive applied.
+
+If not, then your build will fail:
+
+```bash
+  ✖ Verifying graphql schema...
+    Building API...
+    Cleaning Web...
+    Building Web...
+    Prerendering Web...
+You must specify one of @requireAuth, @skipAuth or a custom directive for
+- contacts Query
+- posts Query
+- post Query
+- updatePost Mutation
+- deletePost Mutation
+```
+
+or your server won't start up and you should see that "Schema validation failed":
+
+```bash
+gen | Generating TypeScript definitions and GraphQL schemas...
+gen | 47 files generated
+api | Building... Took 593 ms
+api | [GQL Server Error] - Schema validation failed
+api | ----------------------------------------
+api | You must specify one of @requireAuth, @skipAuth or a custom directive for
+api | - posts Query
+api | - createPost Mutation
+api | - updatePost Mutation
+api | - deletePost Mutation
+```
+
+To correct this, just add the appropriate directive to your queries and mutations.
+
+### @requireAuth
+
+It's your responsibility to implement the `requireAuth()` function in your app's `api/src/lib/auth.{js|ts}` to check if the user is properly authenticated and/or has the expected role membership.
+
+The `@requireAuth` directive will call the `requireAuth()` function to determine if the user is authenticated or not.
+
+```tsx title="api/src/lib/auth.ts"
+// ...
+
+export const isAuthenticated = (): boolean => {
+  return true // 👈 replace with the appropriate check
+}
+
+// ...
+
+export const requireAuth = ({ roles }: { roles: AllowedRoles }) => {
+  if (!isAuthenticated()) {
+    throw new AuthenticationError("You don't have permission to do that.")
+  }
+
+  if (!hasRole({ roles })) {
+    throw new ForbiddenError("You don't have access to do that.")
+  }
+}
+```
+
+> **Note**: The `auth.ts` file here is the stub for a new RedwoodJS app. Once you have set up auth with your provider, this will enforce a proper authentication check.
+
+### @skipAuth
+
+If, however, you want your query or mutation to be public, then simply use `@skipAuth`.
+
+## Custom Directives
+
+Want to write your own directive? You can, of course!
+Just generate one using the Redwood CLI; it takes care of the boilerplate and even gives you a handy test!
+
+### Generators
+
+When using the `yarn redwood generate` command,
+you'll be presented with a choice of creating a Validator or a Transformer directive.
+
+```bash
+yarn redwood generate directive myDirective
+
+? What type of directive would you like to generate? › - Use arrow-keys. Return to submit.
+❯   Validator - Implement a validation: throw an error if criteria not met to stop execution
+    Transformer - Modify values of fields or query responses
+```
+
+> **Note:** You can pass the `--type` flag with either `validator` or `transformer` to create the desired directive type.
+
+After picking the directive type, the files will be created in your `api/src/directives` directory:
+
+```bash
+  ✔ Generating directive file ...
+    ✔ Successfully wrote file `./api/src/directives/myDirective/myDirective.test.ts`
+    ✔ Successfully wrote file `./api/src/directives/myDirective/myDirective.ts`
+  ✔ Generating TypeScript definitions and GraphQL schemas ...
+  ✔ Next steps...
+
+    After modifying your directive, you can add it to your SDLs e.g.:
+     // example todo.sdl.js
+     # Option A: Add it to a field
+     type Todo {
+       id: Int!
+       body: String! @myDirective
+     }
+
+     # Option B: Add it to query/mutation
+     type Query {
+       todos: [Todo] @myDirective
+     }
+```
+
+### Validator
+
+Let's create a `@isSubscriber` directive that checks roles to see if the user is a subscriber.
+
+```bash
+yarn rw g directive isSubscriber --type validator
+```
+
+Next, implement your validation logic in the directive's `validate` function.
+
+Validator directives don't have access to the field value (i.e. they're called before resolving the value). But they do have access to the `context` and `directiveArgs`.
+They can be async or sync.
+And if you want to stop executing (because of insufficient permissions, for example), throw an error.
+The return value is ignored.
+
+An example of `directiveArgs` is the `roles` argument in the directive `@requireAuth(roles: "ADMIN")`.
+
+```tsx
+const validate: ValidatorDirectiveFunc = ({ context, directiveArgs }) => {
+  // You can also modify your directive to take arguments
+  // and use the directiveArgs object provided to this function to get values
+  logger.debug(directiveArgs, 'directiveArgs in isSubscriber directive')
+
+  throw new Error('Implementation missing for isSubscriber')
+}
+```
+
+Here we can access the `context` parameter and then check to see if the `currentUser` is authenticated and if they belong to the `SUBSCRIBER` role:
+
+```tsx title="/api/src/directives/isSubscriber/isSubscriber.ts"
+// ...
+
+const validate: ValidatorDirectiveFunc = ({ context }) => {
+  if (!context.currentUser) {
+    throw new AuthenticationError("You don't have permission to do that.")
+  }
+
+  if (!context.currentUser.roles?.includes('SUBSCRIBER')) {
+    throw new ForbiddenError("You don't have access to do that.")
+  }
+}
+```
+
+#### Writing Validator Tests
+
+When writing a Validator directive test, you'll want to:
+
+- ensure the directive is named consistently and correctly so the directive name maps properly when validating
+- confirm that the directive throws an error when invalid. The Validator directive should always have a reason to throw an error
+
+Since we stub out the `Error('Implementation missing for isSubscriber')` case when generating the Validator directive, these tests should pass.
+But once you begin implementing the validate logic, it's on you to update them appropriately.
+
+```tsx
+import { mockRedwoodDirective, getDirectiveName } from '@redwoodjs/testing/api'
+
+import isSubscriber from './isSubscriber'
+
+describe('isSubscriber directive', () => {
+  it('declares the directive sdl as schema, with the correct name', () => {
+    expect(isSubscriber.schema).toBeTruthy()
+    expect(getDirectiveName(isSubscriber.schema)).toBe('isSubscriber')
+  })
+
+  it('has a isSubscriber throws an error if validation does not pass', () => {
+    const mockExecution = mockRedwoodDirective(isSubscriber, {})
+
+    expect(mockExecution).toThrowError('Implementation missing for isSubscriber')
+  })
+})
+```
+
+:::tip
+If your Validator Directive is asynchronous, you can use `mockAsyncRedwoodDirective` instead.
+
+```ts
+import { mockAsyncRedwoodDirective } from '@redwoodjs/testing/api'
+
+// ...
+
+describe('isSubscriber directive', () => {
+  it('has a isSubscriber throws an error if validation does not pass', async () => {
+    const mockExecution = mockAsyncRedwoodDirective(isSubscriber, {})
+    await expect(mockExecution()).rejects.toThrowError(
+      'Implementation missing for isSubscriber'
+    )
+  })
+})
+```
+
+:::
+
+### Transformer
+
+Let's create a `@maskedEmail` directive that checks roles to see if the user should see the complete email address or if it should be obfuscated from prying eyes:
+
+```bash
+yarn rw g directive maskedEmail --type transformer
+```
+
+Next, implement your transformation logic in the directive's `transform` function.
+
+Transformer directives provide `context` and `resolvedValue` parameters and run **after** resolving the value.
+Transformer directives **must** be synchronous and return a value.
+You can throw an error if you want to stop executing, but note that the value has already been resolved.
+
+Take note of the `resolvedValue`:
+
+```tsx
+const transform: TransformerDirectiveFunc = ({ context, resolvedValue }) => {
+  return resolvedValue.replace('foo', 'bar')
+}
+```
+
+It contains the value of the field on which the directive was placed. Here, `email`.
+So the `resolvedValue` will be the value of the email property in the User model, the "original value" so to speak.
+
+Whatever you return from the `transform` function replaces the resolved `email` value in the response.
+
+> 🛎️ **Important**
+>
+> You must return a value of the same type. So, if your `resolvedValue` is a `String`, return a `String`. If it's a `Date`, return a `Date`. Otherwise, your data will not match the SDL Type.
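+
+To make this concrete, here's a minimal sketch of what a finished `maskedEmail` transform could look like. It's only an illustration: it assumes a `permittedRoles` argument as in the earlier SDL example, and that `directiveArgs` is available to the transform function the same way it is for validators (as the argument discussion above suggests). Adjust the role check to your own auth setup.
+
+```tsx
+import gql from 'graphql-tag'
+
+import { createTransformerDirective, TransformerDirectiveFunc } from '@redwoodjs/graphql-server'
+
+export const schema = gql`
+  directive @maskedEmail(permittedRoles: [String]) on FIELD_DEFINITION
+`
+
+const transform: TransformerDirectiveFunc = ({ context, directiveArgs, resolvedValue }) => {
+  const permittedRoles = directiveArgs?.permittedRoles ?? []
+  const userRoles = context.currentUser?.roles ?? []
+
+  // Permitted roles (e.g. ADMIN) see the original, un-masked value
+  if (userRoles.some((role) => permittedRoles.includes(role))) {
+    return resolvedValue
+  }
+
+  // Nothing to mask if the resolved value isn't an email-shaped string
+  if (typeof resolvedValue !== 'string' || !resolvedValue.includes('@')) {
+    return resolvedValue
+  }
+
+  // Mask the local part, keep the domain: "jane@example.com" -> "****@example.com"
+  const [local, domain] = resolvedValue.split('@')
+  return `${'*'.repeat(local.length)}@${domain}`
+}
+
+const maskedEmail = createTransformerDirective(schema, transform)
+
+export default maskedEmail
+```
+
+With something like this in place, users in a permitted role get the real address back, while everyone else gets a masked one that is still the same type (a `String`), as the note above requires.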
+ +#### Writing Transformer Tests + +When writing a Transformer directive test, you'll want to: + +- ensure the directive is named consistently and correctly so the directive name maps properly when transforming +- confirm that the directive returns a value and that it's the expected transformed value + +Since we stub out and mock the `mockedResolvedValue` when generating the Transformer directive, these tests should pass. + +Here we mock the value `foo` and, since the generated `transform` function replaces `foo` with `bar`, we expect that after execution, the returned value will be `bar`. +But once you begin implementing the validate logic, it's on you to update appropriately. + +```tsx +import { mockRedwoodDirective, getDirectiveName } from '@redwoodjs/testing/api' + +import maskedEmail from './maskedEmail' + +describe('maskedEmail directive', () => { + it('declares the directive sdl as schema, with the correct name', () => { + expect(maskedEmail.schema).toBeTruthy() + expect(getDirectiveName(maskedEmail.schema)).toBe('maskedEmail') + }) + + it('has a maskedEmail implementation transforms the value', () => { + const mockExecution = mockRedwoodDirective(maskedEmail, { + mockedResolvedValue: 'foo', + }) + + expect(mockExecution()).toBe('bar') + }) +}) +``` + +:::tip +If your Transformer Directive is asynchronous, you can use `mockAsyncRedwoodDirective` instead. + +```ts +import { mockAsyncRedwoodDirective } from '@redwoodjs/testing/api' + +// ... + +import maskedEmail from './maskedEmail' + +describe('maskedEmail directive', () => { + it('has a maskedEmail implementation transforms the value', async () => { + const mockExecution = mockAsyncRedwoodDirective(maskedEmail, { + mockedResolvedValue: 'foo', + }) + + await expect(mockExecution()).resolves.toBe('bar') + }) +}) +``` +::: diff --git a/docs/versioned_docs/version-7.0/docker.md b/docs/versioned_docs/version-7.0/docker.md new file mode 100644 index 000000000000..53a1c3aa0a38 --- /dev/null +++ b/docs/versioned_docs/version-7.0/docker.md @@ -0,0 +1,680 @@ +--- +description: Redwood's Dockerfile +--- + +# Docker + +:::note The Dockerfile is experimental + +Redwood's Dockerfile is the collective effort of several hard-working community members. +We've worked hard to optimize it, but expect changes as we collaborate with users and deploy providers. + +::: + +If you're not familiar with Docker, we recommend going through their [getting started](https://docs.docker.com/get-started/) documentation. + +## Set up + +To get started, run the setup command: + +``` +yarn rw experimental setup-docker +``` + +The setup commands does several things: +- writes four files: `Dockerfile`, `.dockerignore`, `docker-compose.dev.yml`, and `docker-compose.prod.yml` +- adds the `@redwoodjs/api-server` and `@redwoodjs/web-server` packages to the api and web sides respectively +- edits the `browser.open` setting in the `redwood.toml` (right now, if it's set to `true`, it'll break the dev server when running the `docker-compose.dev.yml`) + +## Usage + +You can start the dev compose file with: + +``` +docker compose -f ./docker-compose.dev.yml up +``` + +And the prod compose file with: + +``` +docker compose -f ./docker-compose.prod.yml up +``` + +:::info make sure to specify build args + +If your api side or web side depend on env vars at build time, you may need to supply them as `--build-args`, or in the compose files. + +This is often the most tedious part of setting up Docker. Have ideas of how it could be better? 
Let us know on the [forums](https://community.redwoodjs.com/)! + +::: + +The first time you do this, you'll have to use the `console` stage to go in and migrate the database—just like you would with a Redwood app on your machine: + +``` +docker compose -f ./docker-compose.dev.yml run --rm -it console /bin/bash +root@...:/home/node/app# yarn rw prisma migrate dev +``` + +## The Dockerfile in detail + +The documentation here goes through and explains every line of Redwood's Dockerfile. +If you'd like to see the whole Dockerfile for reference, you can find it [here](https://github.com/redwoodjs/redwood/tree/main/packages/cli/src/commands/experimental/templates/docker/Dockerfile) or by setting it up in your project: `yarn rw experimental setup-docker`. + +Redwood takes advantage of [Docker's multi-stage build support](https://docs.docker.com/build/building/multi-stage/) to keep the final production images lean. + +### The `base` stage + +The `base` stage installs dependencies. +It's used as the base image for the build stages and the `console` stage. + +```Dockerfile +FROM node:20-bookworm-slim as base +``` + +We use a Node.js 20 image as the base image because that's the version Redwood targets. +"bookworm" is the codename for the current stable distribution of Debian (version 12). +Lastly, the "slim" variant of the `node:20-bookworm` image only includes what Node.js needs which reduces the image's size while making it more secure. + +:::tip Why not alpine? + +While alpine may be smaller, it uses musl, a different C standard library. +In developing this Dockerfile, we prioritized security over size. + +If you know what you're doing feel free to change this—it's your Dockerfile now! +Just remember to change the `apt-get` instructions further down too if needed. + +::: + +Moving on, next we have `corepack enable`: + +```Dockerfile +RUN corepack enable +``` + +[Corepack](https://nodejs.org/docs/latest-v18.x/api/corepack.html), Node's manager for package managers, needs to be enabled so that Yarn can use the `packageManager` field in your project's root `package.json` to pick the right version of itself. +If you'd rather check in the binary, you still can, but you'll need to remember to copy it over (i.e. `COPY --chown=node:node .yarn/releases .yarn/releases`). + +```Dockerfile +RUN apt-get update && apt-get install -y \ + openssl \ + # python3 make gcc \ + && rm -rf /var/lib/apt/lists/* +``` + +The `node:20-bookworm-slim` image doesn't have [OpenSSL](https://www.openssl.org/), which [seems to be a bug](https://github.com/nodejs/docker-node/issues/1919). +(It was included in the "bullseye" image, the codename for Debian 11.) +On Linux, [Prisma needs OpenSSL](https://www.prisma.io/docs/reference/system-requirements#linux-runtime-dependencies), so we install it here via Ubuntu's package manager APT. +Python and its dependencies are there ready to be uncommented if you need them. See the [Troubleshooting](#python) section for more information. + +[It's recommended](https://docs.docker.com/develop/develop-images/instructions/#apt-get) to combine `apt-get update` and `apt-get install -y` in the same `RUN` statement for cache busting. +After installing, we clean up the apt cache to keep the layer lean. (Running `apt-get clean` isn't required—[official Debian images do it automatically](https://github.com/moby/moby/blob/03e2923e42446dbb830c654d0eec323a0b4ef02a/contrib/mkimage/debootstrap#L82-L105).) 
+ +```Dockerfile +USER node +``` + +This and subsequent `chown` options in `COPY` instructions are for security. +[Services that can run without privileges should](https://docs.docker.com/develop/develop-images/instructions/#user). +The Node.js image includes a user, `node`, created with an explicit `uid` and `gid` (`1000`). +We reuse it. + +```Dockerfile +WORKDIR /home/node/app + +COPY --chown=node:node .yarnrc.yml . +COPY --chown=node:node package.json . +COPY --chown=node:node api/package.json api/ +COPY --chown=node:node web/package.json web/ +COPY --chown=node:node yarn.lock . +``` + +Here we copy the minimum set of files that the `yarn install` step needs. +The order isn't completely arbitrary—it tries to maximize [Docker's layer caching](https://docs.docker.com/build/cache/). +We expect `yarn.lock` to change more than the `package.json`s and the `package.json`s to change more than `.yarnrc.yml`. +That said, it's hard to argue that these files couldn't be arranged differently, or that the `COPY` instructions couldn't be combined. +The important thing is that they're all here, before the `yarn install` step: + +```Dockerfile +RUN mkdir -p /home/node/.yarn/berry/index +RUN mkdir -p /home/node/.cache + +RUN --mount=type=cache,target=/home/node/.yarn/berry/cache,uid=1000 \ + --mount=type=cache,target=/home/node/.cache,uid=1000 \ + CI=1 yarn install +``` + +This step installs all your project's dependencies—production and dev. +Since we use multi-stage builds, your production images won't pay for the dev dependencies installed in this step. +The build stages need the dev dependencies. + +The `mkdir` steps are a workaround for a permission error. We're working on removing them, but for now if you remove them the install step will probably fail. + +This step is a bit more involved than the others. +It uses a [cache mount](https://docs.docker.com/build/cache/#use-your-package-manager-wisely). +Yarn operates in three steps: resolution, fetch, and link. +If you're not careful, the cache for the fetch step basically doubles the number of `node_modules` installed on disk. +We could disable it all together, but by using a cache mount, we can still get the benefits without paying twice. +We set it to the default directory here, but you can change its location in `.yarnrc.yml`. +If you've done so you'll have to change it here too. + +One more thing to note: without setting `CI=1`, depending on the deploy provider, yarn may think it's in a TTY, making the logs difficult to read. With this set, yarn adapts accordingly. +Enabling CI enables [immutable installs](https://v3.yarnpkg.com/configuration/yarnrc#enableImmutableInstalls) and [inline builds](https://v3.yarnpkg.com/configuration/yarnrc#enableInlineBuilds), both of which are highly recommended. + +```Dockerfile +COPY --chown=node:node redwood.toml . +COPY --chown=node:node graphql.config.js . +COPY --chown=node:node .env.defaults .env.defaults +``` + +We'll need these config files for the build and production stages. +The `redwood.toml` file is Redwood's de-facto config file. +Both the build and serve stages read it to enable and configure functionality. + +:::warning `.env.defaults` is ok to include but `.env` is not + +If you add a secret to the Dockerfile, it can be excavated. +While it's technically true that multi stage builds add a sort of security layer, it's not a best practice. +Leave them out and look to your deploy provider for further configuration. 
+ +::: + +### The `api_build` stage + +The `api_build` stage builds the api side: + +```Dockerfile +FROM base as api_build + +# If your api side build relies on build-time environment variables, +# specify them here as ARGs. +# +# ARG MY_BUILD_TIME_ENV_VAR + +COPY --chown=node:node api api +RUN yarn rw build api +``` + +After the work we did in the base stage, building the api side amounts to copying in the api directory and running `yarn rw build api`. + +### The `api_serve` stage + +The `api_serve` stage serves your GraphQL api and functions: + +```Dockerfile +FROM node:20-bookworm-slim as api_serve + +RUN corepack enable + +RUN apt-get update && apt-get install -y \ + openssl \ + # python3 make gcc \ + && rm -rf /var/lib/apt/lists/* +``` + +We don't start from the `base` stage, but begin anew with the `node:20-bookworm-slim` image. +Since this is a production stage, it's important for it to be as small as possible. +Docker's [multi-stage builds](https://docs.docker.com/build/building/multi-stage/) enables this. + +```Dockerfile +USER node +WORKDIR /home/node/app + +COPY --chown=node:node .yarnrc.yml .yarnrc.yml +COPY --chown=node:node package.json . +COPY --chown=node:node api/package.json api/ +COPY --chown=node:node yarn.lock yarn.lock +``` + +Like other `COPY` instructions, ordering these files with care enables layering caching. + +```Dockerfile +RUN mkdir -p /home/node/.yarn/berry/index +RUN mkdir -p /home/node/.cache + +RUN --mount=type=cache,target=/home/node/.yarn/berry/cache,uid=1000 \ + --mount=type=cache,target=/home/node/.cache,uid=1000 \ + CI=1 yarn workspaces focus api --production +``` + +This is a critical step for image size. +We don't use the regular `yarn install` command. +Using the [official workspaces plugin](https://github.com/yarnpkg/berry/tree/master/packages/plugin-workspace-tools)—which is included by default in yarn v4—we "focus" on the api workspace, only installing its production dependencies. + +The cache mount will be populated at this point from the install in the `base` stage, so the fetch step should fly by. + +```Dockerfile +COPY --chown=node:node redwood.toml . +COPY --chown=node:node graphql.config.js . +COPY --chown=node:node .env.defaults .env.defaults + +COPY --chown=node:node --from=api_build /home/node/app/api/dist /home/node/app/api/dist +COPY --chown=node:node --from=api_build /home/node/app/api/db /home/node/app/api/db +COPY --chown=node:node --from=api_build /home/node/app/node_modules/.prisma /home/node/app/node_modules/.prisma +``` + +Here's where we really take advantage of multi-stage builds by copying from the `api_build` stage. +At this point all the building has been done. Now we can just grab the artifacts without having to lug around the dev dependencies. + +There's one more thing that was built: the prisma client in `node_modules/.prisma`. +We need to grab it too. + +```Dockerfile +ENV NODE_ENV=production + +CMD [ "node_modules/.bin/rw-server", "api" ] +``` + +Lastly, the default command is to start the api server using the bin from the `@redwoodjs/api-server` package. +You can override this command if you have more specific needs. + +Note that the Redwood CLI isn't available anymore. (It's a dev dependency.) +To access the server bin, we have to find its path in `node_modules`. +Though this is somewhat discouraged in modern yarn, since we're using the `node-modules` node linker, it's in `node_modules/.bin`. 
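+
+If you want to check this stage in isolation, you can build and run just the `api_serve` target. The image tag below is only an example; the port is the api server's default (`8911`), which is also what the web stage's `API_PROXY_TARGET` assumes later on:
+
+```bash
+# Build only the api_serve stage (the tag name is just an example)
+docker build . -t my-redwood-api --target api_serve
+
+# Run it, supplying runtime configuration such as the database URL as env vars
+docker run --rm -p 8911:8911 -e DATABASE_URL="$DATABASE_URL" my-redwood-api
+```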
+ +### The `web_build` stage + +This `web_build` builds the web side: + +```Dockerfile +FROM base as web_build + +COPY --chown=node:node web web +RUN yarn rw build web --no-prerender +``` + +After the work we did in the base stage, building the web side amounts to copying in the web directory and running `yarn rw build web`. + +This stage is a bit of a simplification. +It foregoes Redwood's prerendering (SSG) capability. +Prerendering is a little trickier; see [the `web_prerender_build` stage](#the-web_prerender_build-stage). + +If you've included environment variables in your `redwood.toml`'s `web.includeEnvironmentVariables` field, you'll want to specify them as ARGs here. +The setup command should've inlined them for you. + +### The `web_prerender_build` stage + +The `web_prerender_build` stage builds the web side with prerender. + +```Dockerfile +FROM api_build as web_build_with_prerender + +COPY --chown=node:node web web +RUN yarn rw build web +``` + +Building the web side with prerendering poses a challenge. +Prerender needs the api side around to get data for your Cells and route hooks. +The key line here is the first one—this stage uses the `api_build` stage as its base image. + +### The `web_serve` stage + +```Dockerfile +FROM node:20-bookworm-slim as web_serve + +RUN corepack enable + +USER node +WORKDIR /home/node/app + +COPY --chown=node:node .yarnrc.yml . +COPY --chown=node:node package.json . +COPY --chown=node:node web/package.json web/ +COPY --chown=node:node yarn.lock . + +RUN mkdir -p /home/node/.yarn/berry/index +RUN mkdir -p /home/node/.cache + +RUN --mount=type=cache,target=/home/node/.yarn/berry/cache,uid=1000 \ + --mount=type=cache,target=/home/node/.cache,uid=1000 \ + CI=1 yarn workspaces focus web --production + +COPY --chown=node:node redwood.toml . +COPY --chown=node:node graphql.config.js . +COPY --chown=node:node .env.defaults .env.defaults + +COPY --chown=node:node --from=web_build /home/node/app/web/dist /home/node/app/web/dist + +ENV NODE_ENV=production \ + API_PROXY_TARGET=http://api:8911 + +CMD "node_modules/.bin/rw-web-server" "--api-proxy-target" "$API_PROXY_TARGET" +``` + +Most of this stage is similar to the `api_serve` stage, except that we're copying from the `web_build` stage instead of the `api_build`. +(If you're prerendering, you'll want to change the `--from=web_build` to `--from=web_prerender_build`.) + +The binary we're using here to serve the web side is `rw-web-server` which comes from the `@redwoodjs/web-server` package. +While this web server will be much more fully featured in the future, right now it's mostly just to get you going. +Ideally you want to put a web server like Nginx or Caddy in front of it. + +Lastly, note that we use the shell form of `CMD` here for its variable expansion. + +### The `console` stage + +The `console` stage is an optional stage for debugging: + +```Dockerfile +FROM base as console + +# To add more packages: +# +# ``` +# USER root +# +# RUN apt-get update && apt-get install -y \ +# curl +# +# USER node +# ``` + +COPY --chown=node:node api api +COPY --chown=node:node web web +COPY --chown=node:node scripts scripts +``` + +The console stage completes the base stage by copying in the rest of your Redwood app. +But then it pretty much leaves you to your own devices. +The intended way to use it is to create an ephemeral container by starting a shell like `/bin/bash` in the image built by targeting this stage: + +```bash +# Build the console image: +docker build . 
-t console --target console +# Start an ephemeral container from it: +docker run --rm -it console /bin/bash +``` + +As the comment says, feel free to add more packages. +We intentionally kept them to a minimum in the base stage, but you shouldn't worry about the size of the image here. + +## Troubleshooting + +### Python + +We tried to make the Dockerfile as lean as possible. +In some cases, that means we excluded a dependency your project needs. +And by far the most common is Python. + +During a stage's `yarn install` step (`RUN ... yarn install`), if you see an error like the following: + +``` +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python Python is not set from command line or npm configuration +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python Python is not set from environment variable PYTHON +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python checking if "python3" can be used +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - executable path is "" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - "" could not be run +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python checking if "python" can be used +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - executable path is "" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - "" could not be run +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python ********************************************************** +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python You need to install the latest version of Python. +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python Node-gyp should be able to find and use Python. If not, +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python you can try one of the following options: +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - Use the switch --python="/path/to/pythonexecutable" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python (accepted by both node-gyp and npm) +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - Set the environment variable PYTHON +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python - Set the npm configuration variable python: +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python npm config set python "/path/to/pythonexecutable" +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python For more information consult the documentation at: +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python https://github.com/nodejs/node-gyp#installation +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python ********************************************************** +➤ YN0000: │ bufferutil@npm:4.0.8 STDERR gyp ERR! find Python +``` + +It's because your project depends on Python and the image doesn't provide it. + +It's easy to fix: just add `python3` and its dependencies (usually `make` and `gcc`): + +```diff + FROM node:20-bookworm-slim as base + + RUN apt-get update && apt-get install -y \ + openssl \ ++ python3 make gcc \ + && rm -rf /var/lib/apt/lists/* +``` + +Not sure why your project depends on Python? `yarn why` is your friend. +From the error message, we know `bufferutil` couldn't build. +But why do we have `bufferutil`? + +``` +yarn why bufferutil +└─ websocket@npm:1.0.34 + └─ bufferutil@npm:4.0.8 (via npm:^4.0.1) +``` + +`websocket` needs `bufferutil`. 
But why do we have `websocket`? +Keep pulling the thread till you get to a top-level dependency: + +``` +yarn why websocket +└─ @supabase/realtime-js@npm:2.8.4 + └─ websocket@npm:1.0.34 (via npm:^1.0.34) + +yarn why @supabase/realtime-js +└─ @supabase/supabase-js@npm:2.38.4 + └─ @supabase/realtime-js@npm:2.8.4 (via npm:^2.8.4) + +yarn why @supabase/supabase-js +├─ api@workspace:api +│ └─ @supabase/supabase-js@npm:2.38.4 (via npm:^2.21.0) +│ +└─ web@workspace:web + └─ @supabase/supabase-js@npm:2.38.4 (via npm:^2.21.0) +``` + +In this case, it looks like it's ultimately because of our auth provider, `@supabase/supabase-js`. + +## Using the Server File + +Redwood v7 introduced a new entry point to Redwood's api server: the server file at `api/src/server.ts`. +The server file was made with Docker in mind. It allows you to + +1. have control over how the api server starts, +2. customize the server as much as you want, and +3. minimize the number of dependencies needed to start the api server process (all you need is Node.js!) + +Get started by running the setup command: + +``` +yarn rw setup server-file +``` + +This should give you a new file at `api/src/server.ts`: + +```typescript title="api/src/server.ts" +import { createServer } from '@redwoodjs/api-server' + +import { logger } from 'src/lib/logger' + +async function main() { + const server = await createServer({ + logger, + }) + + await server.start() +} + +main() +``` + +Without the server file, to start the api side, you'd use binaries provided by `@redwoodjs/api-server` such as `yarn rw-server api` (you may also see this as `./node_modules/.bin/rw-server api`). + +With the server file, there's no indirection. Just use `node`: + +``` +yarn node api/dist/server.js +``` + +:::info You have to build first + +You can't run the server file directly with Node.js; it has to be built first: + +``` +yarn rw build api +``` + +The api serve stage in the Dockerfile pulls from the api build stage, so things are already in the right order there. Similarly, for `yarn rw dev`, the dev server will build and reload the server file for you. + +::: + +That means you can swap the `CMD` instruction in the api server stage: + +```diff + ENV NODE_ENV=production + +- CMD [ "node_modules/.bin/rw-server", "api" ] ++ CMD [ "yarn", "node", "api/dist/server.js" ] +``` + +### Configuring the server + +There's two ways you can configure the server. + +First, you can configure how the underlying Fastify server is instantiated via the`fastifyServerOptions` passed to the `createServer` function: + +```ts title="api/src/server.ts" +const server = await createServer({ + logger, + // highlight-start + fastifyServerOptions: { + // ... + } + // highlight-end +}) +``` + +For the complete list of options, see [Fastify's documentation](https://fastify.dev/docs/latest/Reference/Server/#factory). + +Second, you can register Fastify plugins on the server instance: + +```ts title="api/src/server.ts" +const server = await createServer({ + logger, +}) + +// highlight-next-line +server.register(myFastifyPlugin) +``` + +#### Example: Compressing Payloads and Rate Limiting + +Let's say that we want to compress payloads and add rate limiting. +We want to compress payloads only if they're larger than 1KB, preferring deflate to gzip, +and we want to limit IP addresses to 100 requests in a five minute window. 
+We can leverage two Fastify ecosystem plugins, [@fastify/compress](https://github.com/fastify/fastify-compress) and [@fastify/rate-limit](https://github.com/fastify/fastify-rate-limit), respectively. + +First, you'll need to install these packages: + +``` +yarn workspace api add @fastify/compress @fastify/rate-limit +``` + +Then register them with the appropriate config: + +```ts title="api/src/server.ts" +const server = await createServer({ +  logger, +}) + +await server.register(import('@fastify/compress'), { +  global: true, +  threshold: 1024, +  encodings: ['deflate', 'gzip'], +}) + +await server.register(import('@fastify/rate-limit'), { +  max: 100, +  timeWindow: '5 minutes', +}) +``` + +#### Example: File Uploads + +If you try to POST file content to the api server such as images or PDFs, you may see the following error from Fastify: + +```json +{ +  "statusCode": 400, +  "code": "FST_ERR_CTP_INVALID_CONTENT_LENGTH", +  "error": "Bad Request", +  "message": "Request body size did not match Content-Length" +} +``` + +This is because Fastify [only supports `application/json` and `text/plain` content types natively](https://www.fastify.io/docs/latest/Reference/ContentTypeParser/). +While Redwood configures the api server to also accept `application/x-www-form-urlencoded` and `multipart/form-data`, if you want to support other content or MIME types (like images or PDFs), you'll need to configure them here in the server file. + +You can use Fastify's `addContentTypeParser` function to allow uploads of the content types your application needs. +For example, to support image file uploads you'd tell Fastify to allow `/^image\/.*/` content types: + +```ts title="api/src/server.ts" +const server = await createServer({ +  logger, +}) + +server.addContentTypeParser(/^image\/.*/, (req, payload, done) => { +  payload.on('end', () => { +    done() +  }) +}) +``` + +The regular expression (`/^image\/.*/`) above allows all image content or MIME types because [they start with "image"](https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types). + +Now, when you POST those content types to a function served by the api server, you can access the file content on `event.body`. + +### The `start` method + +Since there are a few different ways to configure the host and port the server listens at, the server instance returned by `createServer` has a special `start` method: + +```ts title="api/src/server.ts" +await server.start() +``` + +`start` is a thin wrapper around [`listen`](https://fastify.dev/docs/latest/Reference/Server/#listen). +It takes the same arguments as `listen`, except for host and port. It computes those in the following way, in order of precedence: + +1. `--apiHost` or `--apiPort` flags: + +   ``` +   yarn node api/dist/server.js --apiHost 0.0.0.0 --apiPort 8913 +   ``` + +2. `REDWOOD_API_HOST` or `REDWOOD_API_PORT` env vars: + +   ``` +   export REDWOOD_API_HOST='0.0.0.0' +   export REDWOOD_API_PORT='8913' +   yarn node api/dist/server.js +   ``` + +3. 
`[api].host` and `[api].port` in `redwood.toml`: + + ```toml title="redwood.toml" + [api] + host = '0.0.0.0' + port = 8913 + ``` + +If you'd rather not have `createServer` parsing `process.argv`, you can disable it via `parseArgv`: + +```ts title="api/src/server.ts" +await createServer({ + parseArgv: false, +}) +``` + +And if you'd rather it do none of this, just change `start` to `listen` and specify the host and port inline: + +```ts title="api/src/server.ts" +await server.listen({ + host: '0.0.0.0', + port: 8913, +}) +``` + +If you don't specify a host, `createServer` uses `NODE_ENV` to set it. If `NODE_ENV` is production, it defaults to `'0.0.0.0'` and `'::'` otherwise. +The Dockerfile sets `NODE_ENV` to production so that things work out of the box. diff --git a/docs/versioned_docs/version-7.0/environment-variables.md b/docs/versioned_docs/version-7.0/environment-variables.md new file mode 100644 index 000000000000..537d7d259930 --- /dev/null +++ b/docs/versioned_docs/version-7.0/environment-variables.md @@ -0,0 +1,152 @@ +--- +description: How to use environment variables on the api and web sides +--- + +# Environment Variables + +You can provide environment variables to each side of your Redwood app in different ways, depending on each Side's target, and whether you're in development or production. + +> Right now, Redwood apps have two fixed Sides, API and Web, that each have a single target, nodejs and browser respectively. + +## Generally + +Redwood apps use [dotenv](https://github.com/motdotla/dotenv) to load vars from your `.env` file into `process.env`. +For a reference on dotenv syntax, see the dotenv README's [Rules](https://github.com/motdotla/dotenv#rules) section. + +> Technically, we use [dotenv-defaults](https://github.com/mrsteele/dotenv-defaults), which is how we also supply and load `.env.defaults`. + +<!-- also in a Redwood app's base directory. --> + +Redwood also configures Vite, so that all references to `process.env` vars on the Web side will be replaced with the variable's actual value at build-time. More on this in [Web](#Web). + +## Web + +### Including environment variables +> **Heads Up:** for Web to access environment variables in production, you _must_ configure one of the options below. +> +> Redwood recommends **Option 1: `redwood.toml`** as it is the most robust. + +In production, you can get environment variables to the Web Side either by + +1. adding to `redwood.toml` via the `includeEnvironmentVariables` array, or +2. prefixing with `REDWOOD_ENV_` + +Just like for the API Side, you'll also have to set them up with your provider. Some hosting providers distinguish between build and runtime environments for configuring environment variables. +Environment variables for the web side should in those cases be configured as build-time variables. + +#### Option 1: includeEnvironmentVariables in redwood.toml + +For Example: + +```toml title="redwood.toml" +[web] + includeEnvironmentVariables = ['SECRET_API_KEY', 'ANOTHER_ONE'] +``` + +By adding environment variables to this array, they'll be available to Web in production via `process.env.SECRET_API_KEY`. This means that if you have an environment variable like `process.env.SECRET_API_KEY` Redwood removes and replaces it with its _actual_ value. + +Note: if someone inspects your site's source, _they could see your `REDWOOD_ENV_SECRET_API_KEY` in plain text._ This is a limitation of delivering static JS and HTML to the browser. 
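+ +For example, with `SECRET_API_KEY` added to `includeEnvironmentVariables` as above, a component on the web side can reference it directly and the value gets inlined when you build. Here's a minimal sketch (the component and URL are hypothetical, purely for illustration): + +```jsx title="web/src/components/WeatherWidget/WeatherWidget.jsx" +// At build time, Vite replaces `process.env.SECRET_API_KEY` with its literal value, +// which is why it ends up visible in the delivered bundle (see the note above). +const WeatherWidget = () => { +  const url = `https://api.example.com/weather?key=${process.env.SECRET_API_KEY}` + +  return <a href={url}>Check the weather</a> +} + +export default WeatherWidget +```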
+ +#### Option 2: Prefixing with REDWOOD\_ENV\_ + +In `.env`, if you prefix your environment variables with `REDWOOD_ENV_`, they'll be available via `process.env.REDWOOD_ENV_MY_VAR_NAME`, and will be dynamically replaced at build-time. + +Like the option above, these are also removed and replaced with the _actual value_ during build in order to be available in production. + + +### Accessing API URLs + +Redwood automatically makes your API URL configurations from the web section of your `redwood.toml` available globally. +They're accessible via the `window` or `global` objects. +For example, `global.RWJS_API_GRAPHQL_URL` gives you the URL for your graphql endpoint. + +The toml values are mapped as follows: + +| `redwood.toml` key | Available globally as | Description | +| ------------------ | ----------------------------- | ---------------------------------------- | +| `apiUrl` | `global.RWJS_API_URL` | URL or absolute path to your api-server | +| `apiGraphQLUrl` | `global.RWJS_API_GRAPHQL_URL` | URL or absolute path to GraphQL function | + +See the [redwood.toml reference](app-configuration-redwood-toml.md#api-paths) for more details. + +## Development Fatal Error Page + +```text title=".env" +REDWOOD_ENV_EDITOR=vscode +``` + +Redwood comes with a `FatalErrorPage` that displays helpful information—like the stack trace and the request—when something breaks. + +> `FatalErrorPage` isn't bundled when deploying to production + +As part of the stack trace, there are links to the original source files so that they can be quickly opened in your editor. +The page defaults to VSCode, but you can override the editor by setting the environment variable `REDWOOD_ENV_EDITOR`. + +## API + +### Development + +You can access environment variables defined in `.env` and `.env.defaults` as `process.env.VAR_NAME`. For example, if we define the environment variable `HELLO_ENV` in `.env`: + +``` +HELLO_ENV=hello world +``` + +and make a hello Function (`yarn rw generate function hello`) and reference `HELLO_ENV` in the body of our response: + +```jsx {6} title="./api/src/functions/hello.js" +export const handler = async (event, context) => { + return { + statusCode: 200, + body: `${process.env.HELLO_ENV}`, + } +} +``` + +Navigating to http://localhost:8911/hello shows that the Function successfully accesses the environment variable: + +<!-- @todo --> +<!-- Get a better-quality pic --> + +![rw-envVars-api](https://user-images.githubusercontent.com/32992335/86520528-47112100-bdfa-11ea-8d7e-1c0d502805b2.png) + +### Production + +<!-- @todo --> +<!-- Deployment system? platform? --> + +Whichever platform you deploy to, they'll have some specific way of making environment variables available to the serverless environment where your Functions run. For example, if you deploy to Netlify, you set your environment variables in **Settings** > **Build & Deploy** > **Environment**. You'll just have to read your provider's documentation. +Some hosting providers distinguish between build and runtime environments for configuring environment variables. Environment variables for the api side should in those cases be configured as runtime variables. + +## Keeping Sensitive Information Safe + +Since it usually contains sensitive information, you should [never commit your `.env` file](https://github.com/motdotla/dotenv#should-i-commit-my-env-file). 
Note that you'd actually have to go out of your way to do this as, by default, a Redwood app's `.gitignore` explicitly ignores `.env`: + +```plaintext {2} +.DS_Store +.env +.netlify +dev.db +dist +dist-babel +node_modules +yarn-error.log +``` + +## Where Does Redwood Load My Environment Variables? + +For all the variables in your `.env` and `.env.defaults` files to make their way to `process.env`, there has to be a call to `dotenv`'s `config` function somewhere. So where is it? + +It's in [the CLI](https://github.com/redwoodjs/redwood/blob/main/packages/cli/src/index.js#L6-L12)—every time you run a `yarn rw` command: + +```jsx title="packages/cli/src/index.js" +import { config } from 'dotenv-defaults' + +config({ + path: path.join(getPaths().base, '.env'), + encoding: 'utf8', + defaults: path.join(getPaths().base, '.env.defaults'), +}) +``` + +Remember, if `yarn rw dev` is already running, your local app won't reflect any changes you make to your `.env` file until you stop and re-run `yarn rw dev`. diff --git a/docs/versioned_docs/version-7.0/forms.md b/docs/versioned_docs/version-7.0/forms.md new file mode 100644 index 000000000000..db7b39ce2657 --- /dev/null +++ b/docs/versioned_docs/version-7.0/forms.md @@ -0,0 +1,548 @@ +--- +description: Redwood makes building forms easier with helper components +--- + +# Forms + +Redwood provides several helpers to make building forms easier. +All of Redwood's helpers are simple wrappers around [React Hook Form](https://react-hook-form.com/) (RHF) that make it even easier to use in most cases. + +If Redwood's helpers aren't flexible enough for you, you can use React Hook Form directly. `@redwoodjs/forms` exports everything it does: + +```jsx +import { + useForm, + useFormContext, + /** + * Or anything else React Hook Form exports! + * + * @see {@link https://react-hook-form.com/api} + */ +} from '@redwoodjs/forms' +``` + +## Overview + +`@redwoodjs/forms` exports the following components: + +| Component | Description | +|:------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------| +| `<Form>` | Surrounds all components, providing form and error contexts | +| `<FormError>` | Displays error messages from the server. Typically placed at the top of your form | +| `<Label>` | Used in place of the HTML `<label>` tag. Accepts error-styling props | +| `<InputField>` | Used in place of the HTML `<input>` tag. Accepts validation and error-styling props (also see the list of input field components enumerated below) | +| `<SelectField>` | Used in place of the HTML `<select>` tag. Accepts validation and error-styling props | +| `<TextAreaField>` | Used in place of the HTML `<textarea>` tag. Accepts validation and error-styling props | +| `<FieldError>` | Displays error messages if the field with the same `name` prop has validation errors. Only renders if there's an error on the associated field | +| `<Submit>` | Used in place of `<button type="submit">`. Triggers validation and "submission" (executes the function passed to `<Form>`'s `onSubmit` prop) | + +All HTML `<input>` types are also available as components. They follow the naming convention `<TypeField>` where `Type` is one of the [HTML input types](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#input_types). +We'll refer to them collectively as "input fields". 
+The full list is: + +- `<ButtonField>` +- `<CheckboxField>` +- `<ColorField>` +- `<DateField>` +- `<DatetimeLocalField>` +- `<EmailField>` +- `<FileField>` +- `<HiddenField>` +- `<ImageField>` +- `<MonthField>` +- `<NumberField>` +- `<PasswordField>` +- `<RadioField>` +- `<RangeField>` +- `<ResetField>` +- `<SearchField>` +- `<SubmitField>` +- `<TelField>` +- `<TextField>` +- `<TimeField>` +- `<UrlField>` +- `<WeekField>` + +### Validation and Error-styling Props + +All components ending in `Field` (i.e. all input fields, along with `<SelectField>` and `<TextAreaField>`) accept validation and error-styling props. +By validation and error-styling props, we mean three props specifically: + +- `validation`, which accepts all of React Hook Form's [`register` options](https://react-hook-form.com/api/useform/register), plus the Redwood-exclusive coercion helpers `valueAsBoolean`, `valueAsJSON` +- `errorClassName` and `errorStyle`, which are the classes and styles to apply if there's an error + +Besides `name`, all other props passed to these components are forwarded to the tag they render. +Here's a table for reference: + +| Prop | Description | +|:-----------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `name` | The name of the field. React Hook Form uses it a key to hook it up with everything else | +| `validation` | All your validation logic. Accepts all of React Hook Form's [`register` options](https://react-hook-form.com/api/useform/register), plus the Redwood-exclusive coercion helpers `valueAsBoolean`, `valueAsJSON` | +| `errorClassName` | The class name to apply if there's an error | +| `errorStyle` | The style to apply if there's an error | + +### Example + +A typical React component using these helpers would look something like this: + +```jsx +import { + Form, + Label, + TextField, + TextAreaField, + FieldError, + Submit, +} from '@redwoodjs/forms' + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <Form onSubmit={onSubmit}> + <Label name="name" className="label" errorClassName="label error" /> + <TextField + name="name" + className="input" + errorClassName="input error" + validation={{ required: true }} + /> + <FieldError name="name" className="error-message" /> + + <Label name="email" className="label" errorClassName="label error" /> + <TextField + name="email" + className="input" + errorClassName="input error" + validation={{ + required: true, + pattern: { + value: /[^@]+@[^\.]+\..+/, + }, + }} + /> + <FieldError name="email" className="error-message" /> + + <Label name="message" className="label" errorClassName="label error" /> + <TextAreaField + name="message" + className="input" + errorClassName="input error" + validation={{ required: true }} + /> + <FieldError name="message" className="error-message" /> + + <Submit className="button">Save</Submit> + </Form> + ) +} +``` + +## `<Form>` + +Any form you want Redwood to validate and style in the presence errors should be surrounded by this tag. 
+ +| Prop | Description | +|:--------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `config` | Accepts an object containing options for React Hook Form's [`useForm` hook](https://react-hook-form.com/api/useform) | +| `formMethods` | The functions returned from `useForm`. You only need to use this prop if you need to access to one of the functions that `useForm` returns (see example below) | +| `onSubmit` | Accepts a function to be called if validation succeeds. Called with an object containing name-value pairs of all the fields in your form | + +All other props are forwarded to the `<form>` tag that it renders. + +### `<Form>` Explained + +`<Form>` encapsulates React Hook Form's `useForm` hook and `<FormProvider>` context, along with Redwood's `ServerError` context. +It's hard to talk about this component without getting into the nitty-gritty of React Hook Forms. + +`useForm` is React Hook Form's major hook. +It returns a bunch of functions, one of which is `register`, which you use to quite literally "register" fields into React Hook Form so it can validate them. +(This has to do with [controlled vs. uncontrolled components](https://reactjs.org/docs/uncontrolled-components.html). React Hook Form takes the latter approach.) + +All of Redwood's form helpers need the `register` function to do what they do. But they don't get it straight from `<Form>` because they could be nested arbitrarily deep. That's where `<FormProvider>` comes in: by passing the functions returned from `useForm` to `<FormProvider>`, Redwood's helpers can just use `useFormContext` to get what they need. + +### Using `formMethods` + +There are some functions that `useForm` returns that it'd be nice to have access to. +For example, `useForm` returns a function `reset`, which resets the form's fields. +To access it, you have to call `useForm` yourself. +But you still need to pass `useForm`'s return to the `<FormProvider>` so that Redwood's helpers can register themselves: + +```jsx +import { useForm } from 'react-hook-form' + +const ContactPage = () => { + const formMethods = useForm() + + const onSubmit = (data) => { + console.log(data) + formMethods.reset() + } + + return ( + <Form formMethods={formMethods} onSubmit={onSubmit}> + // Still works! + <TextField name="name" validation={{ required: true }}> + </Form> + ) +} +``` + +## `<FormError>` + +This helper renders a `<div>` containing a "title" message and a `<ul>` enumerating any errors reported by the server when trying to save your form. You can see it in a scaffold if you submit a form that somehow gets passed client-side validation: + +![image](https://user-images.githubusercontent.com/32992335/138611080-9bb138a9-59cc-406d-b926-ef46f4aa7997.png) + +For example, let's say you have a form with a `<TextField>` for a user's email address, but you didn't specify any validation on it: + +```jsx {22} +import { useMutation } from '@redwoodjs/web' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: ContactInput!) { + createContact(input: $input) { + id + } + } +` + +const ContactPage = () => { + const [create, { loading, error }] = useMutation(CREATE_CONTACT) + + const onSubmit = (data) => { + create({ variables: { input: data }}) + } + + return ( + <Form onSubmit={onSubmit}> + <FormError error={error}> + // No validation—any email goes! 
+      <TextField name="email" /> +    </Form> +  ) +} +``` + +Since there's no validation, anything goes! +On the client at least. +GraphQL is built on types, so it's not going to let just anything through. +Instead it'll throw an error and bubble it back up to the top (via the `error` object returned by the `useMutation` hook) where `<FormError>` can render something like: + +```html +<div> +  <p> +    Can't create new contact: +  </p> +  <ul> +    <li> +      email is not formatted like an email address +    </li> +  </ul> +</div> +``` + +## `<Label>` + +Renders an HTML `<label>` tag with different `className` and `style` props depending on whether the field it's associated with has a validation error. + +This tag can be self-closing, in which case the `name` becomes the text of the label: + +```html +<Label name="name" className="input" errorClassName="input error" /> + +<!-- Renders: <label for="name" class="input">name</label> --> +``` + +It can also have standard separate open/close tags and take text inside, in which case that text is the text of the rendered `<label>`: + +```html +<Label name="name" className="input" errorClassName="input error">Your Name</Label> + +<!-- Renders: <label for="name" class="input">Your Name</label> --> +``` + +All props are passed to the underlying `<label>` tag besides the ones listed below: + +| Prop             | Description | +|:-----------------|:------------------------------------------------------------------------------------------------------------------------------------------| +| `name`           | The name of the field that this label is associated with. This should be the same as the `name` prop on the input field this label is for  | +| `errorClassName` | The `className` that's used if the field with the same `name` has a validation error                                                       | +| `errorStyle`     | The `style` that's used if the field with the same `name` has a validation error                                                           | + +## Input Fields + +Inputs are the backbone of most forms. +While you can use `<InputField>` and its `type` prop to make all the different kinds of input fields you'd use in a form, it's often easier to reach for the named input fields (listed above) which have defaults for things like coercion configured where appropriate. + +### Default coercion + +Certain input fields handle coercion automatically, but you can always override the coercion or, if it's not built-in, set it manually via the `validation` prop's [setValueAs](https://react-hook-form.com/api/useform/register) property. + +The input fields that coerce automatically are: + +| Field                  | Default coercion | +|:-----------------------|:-----------------| +| `<CheckboxField>`      | `valueAsBoolean` | +| `<NumberField>`        | `valueAsNumber`  | +| `<DateField>`          | `valueAsDate`    | +| `<DatetimeLocalField>` | `valueAsDate`    | + +`valueAsDate` and `valueAsNumber` are built into React Hook Form and are based on the HTML standard. +But because Redwood uses GraphQL on the backend, it's important that the types submitted by the form be what the GraphQL server expects. +Instead of forcing users to make heavy use of `setValueAs` for custom coercion, Redwood extends React Hook Form's `valueAs` properties with two more for convenience: + +- `valueAsBoolean` +- `valueAsJSON` + +### Default treatment of empty input values + +Redwood provides a flexible treatment of empty input field values. Appropriate treatment of empty fields can make working with fields for database relations easier. + +The treatment of empty field values is governed by the following: + + 1. 
If `setValueAs` is specified by the user, the specified function will determine the behavior of empty fields. + 2. If the `emptyAs` prop is set, then the `emptyAs` prop will determine the field value on an empty condition. See below for `emptyAs` prop values. + 3. If the `validation = { required: true }` prop is set, an empty field will return null. However, +    the validation provided by react-hook-form should engage and prevent submission of the form as an empty value +    would not satisfy the `required` validation. + 4. If the field is an `Id` field, that is, its name ends in "Id", then an empty field will return `null`. A `null` value is the most appropriate value for most database relation fields. +    For scenarios where another value is required for empty cases, utilize the `emptyAs` prop. + 5. If none of the above cases apply, the field value will be set as follows for empty field scenarios: +    - DateFields → null +    - NumberFields → NaN +    - TextFields with valueAsNumber set → NaN +    - SelectFields with valueAsNumber set → NaN +    - SelectFields without valueAsNumber set → '' (empty string) +    - TextFields with valueAsJSON set → null +    - TextFields and comparable → '' (empty string) + +### emptyAs prop + +The `emptyAs` prop allows the user to override the default value for an input field if the field is empty. Provided that a `setValueAs` prop is not specified, Redwood will allow you to override the default empty value returned. +The possible values for `emptyAs` are: +- `null` +- `'undefined'` +- `0` +- `''` (empty string) + +For example: +``` +<NumberField name="quantity" emptyAs="undefined" /> +<NumberField name="score" emptyAs={null} /> +``` +The first field will return `undefined` if it's empty, and the second will return `null`. + +### Custom Input Fields + +You can create a custom field that integrates with Redwood through the use of Redwood's `useRegister` and `useErrorStyles` hooks. Each of these serves a different purpose depending on what you are trying to build. + +`useRegister` registers the field with react-hook-form and is a wrapper for [`register`](https://react-hook-form.com/api/useform/register). + +`useErrorStyles` sets up error styling for your custom input field. + +Using these two together you can create custom input fields that replicate a Redwood input field while also allowing for custom domain logic. + +In the following example we have an all-in-one custom required input field with label, input, and error display. + +```jsx +import { FieldError, useErrorStyles, useRegister } from '@redwoodjs/forms' + +const RequiredField = ({ label, name, validation }) => { +  const register = useRegister({ +    name, +    validation: {...validation, required: true} +  }) + +  const { className: labelClassName, style: labelStyle } = useErrorStyles({ +    className: `my-label-class`, +    errorClassName: `my-label-error-class`, +    name, +  }) + +  const { className: inputClassName, style: inputStyle } = useErrorStyles({ +    className: `my-input-class`, +    errorClassName: `my-input-error-class`, +    name, +  }) + +  return ( +    <> +      <label className={labelClassName} style={labelStyle}>{label}</label> +      <input +        className={inputClassName} +        style={inputStyle} +        type="text" +        {...register} +      /> +      <FieldError name={name} /> +    </> +  ) +} +``` + +### Controlled Component Fields + +If you're working with a fully-featured component library, or have your own production-ready components, you may want to integrate them with Redwood's forms seamlessly. +You can do so via Redwood forms' `useErrorStyles` hook and React Hook Form's `Controller` component. 
+The following example shows how you could go about integrating a component from [`primereact`](https://www.primefaces.org/primereact/) for use in in Redwood's forms like any of the named-input fields listed above: + +```tsx title="web/src/components/ToggleButtonField/ToggleButtonField.tsx" +import { ToggleButton } from 'primereact/togglebutton' +import type { ToggleButtonProps } from 'primereact/togglebutton' + +import { Controller, RegisterOptions, useErrorStyles } from '@redwoodjs/forms' + +interface Props extends ToggleButtonProps { + validation?: RegisterOptions + errorClassName?: string +} + +const ToggleButtonField = (props: Props) => { + const { + name, + className, + errorClassName, + defaultValue, + validation, + style, + ...propsRest + } = props + + const { className: componentClassName, style: componentStyle } = + useErrorStyles({ + className: className, + errorClassName: errorClassName, + name: name, + }) + + return ( + <Controller + name={name} + defaultValue={defaultValue} + rules={validation} + render={({ field: { onChange, onBlur, value, name, ref } }) => ( + <ToggleButton + {...propsRest} + checked={value} + onChange={onChange} + onBlur={onBlur} + ref={ref} + name={name} + className={componentClassName} + style={{ ...componentStyle, ...style }} + /> + )} + /> + ) +} + +export default ToggleButtonField +``` + +## `<SelectField>` + +Renders an HTML `<select>` tag. +It's possible to select multiple values using the `multiple` prop. +When `multiple` is `true`, this field returns an array of values in the same order as the list of options, not in the order they were selected. + +```jsx +<SelectField name="toppings" multiple={true}> + <option>'lettuce'</option> + <option>'tomato'</option> + <option>'pickle'</option> + <option>'cheese'</option> +</SelectField> + +// If the user chooses lettuce, tomato, and cheese, +// the onSubmit handler receives: +// +// { toppings: ["lettuce", "tomato", "cheese"] } +// +``` + +### Validation + +In these two examples, one with multiple-field selection, validation requires that a field be selected and that the user doesn't select the first value in the dropdown menu: + +```jsx +<SelectField + name="selectSingle" + validation={{ + required: true, + validate: { + matchesInitialValue: (value) => { + return ( + value !== 'Please select an option' || + 'Select an Option' + ) + }, + }, + }} +> + <option>Please select an option</option> + <option>Option 1</option> + <option>Option 2</option> +</SelectField> +<FieldError name="selectSingle" style={{ color: 'red' }} /> +``` + +```jsx {2} +<SelectField + multiple={true} + name="selectMultiple" + validation={{ + required: true, + validate: { + matchesInitialValue: (value) => { + let returnValue = [true] + returnValue = value.map((element) => { + if (element === 'Please select an option') + return 'Select an Option' + }) + return returnValue[0] + }, + }, + }} +> + <option>Please select an option</option> + <option>Option 1</option> + <option>Option 2</option> +</SelectField> +<FieldError name="selectMultiple" style={{ color: 'red' }} /> +``` + +### Coercion + +Typically, a `<SelectField>` returns a string, but you can use one of the `valueAs` properties to return another type. +An example use-case is when `<SelectField>` is being used to select a numeric identifier. +Without the `valueAsNumber` property, `<SelectField>` returns a string. 
+But, as per the example below, the `valueAsNumber` can be used to return an `Int`: + +```jsx +<SelectField name="select" validation={{ valueAsNumber: true }}> + <option value={1}>Option 1</option> + <option value={2}>Option 2</option> + <option value={3}>Option 3</option> +</SelectField> +``` + +If `Option 3` is selected, the `<Form>`'s `onSubmit` function is passed data as follows: + +```jsx +{ + select: 3, +} +``` + +## `<FieldError>` + +Renders a `<span>` containing a validation error message if the field with the same `name` attribute has a validation error. Otherwise renders nothing. + +```html +<FieldError name="name" className="error-message"> + +<!-- Renders: <span class="error-message">name is required</span> --> +``` diff --git a/docs/versioned_docs/version-7.0/graphql.md b/docs/versioned_docs/version-7.0/graphql.md new file mode 100644 index 000000000000..35d5c3fb2382 --- /dev/null +++ b/docs/versioned_docs/version-7.0/graphql.md @@ -0,0 +1,2513 @@ +--- +description: GraphQL is a fundamental part of Redwood +--- + +# GraphQL + +GraphQL is a fundamental part of Redwood. Having said that, you can get going without knowing anything about it, and can actually get quite far without ever having to read [the docs](https://graphql.org/learn/). But to master Redwood, you'll need to have more than just a vague notion of what GraphQL is. You'll have to really grok it. + + +## GraphQL 101 + +GraphQL is a query language that enhances the exchange of data between clients (in Redwood's case, a React app) and servers (a Redwood API). + +Unlike a REST API, a GraphQL Client performs operations that allow gathering a rich dataset in a single request. +There's three types of GraphQL operations, but here we'll only focus on two: Queries (to read data) and Mutations (to create, update, or delete data). + +The following GraphQL query: + +```graphql +query GetProject { + project(name: "GraphQL") { + id + title + description + owner { + id + username + } + tags { + id + name + } + } +} +``` + +returns the following JSON response: + +```json +{ + "data": { + "project": { + "id": 1, + "title": "My Project", + "description": "Lorem ipsum...", + "owner": { + "id": 11, + "username": "Redwood", + }, + "tags": [ + { "id": 22, "name": "graphql" } + ] + } + }, + "errors": null +} +``` + +Notice that the response's structure mirrors the query's. In this way, GraphQL makes fetching data descriptive and predictable. + +Again, unlike a REST API, a GraphQL API is built on a schema that specifies exactly which queries and mutations can be performed. +For the `GetProject` query above, here's the schema backing it: + +```graphql +type Project { + id: ID! + title: String + description: String + owner: User! + tags: [Tag] +} + +# ... User and Tag type definitions + +type Query { + project(name: String!): Project +} +``` + +:::info + +More information on GraphQL types can be found in the [official GraphQL documentation](https://graphql.org/learn/schema/). + +::: + +Finally, the GraphQL schema is associated with a resolvers map that helps resolve each requested field. For example, here's what the resolver for the owner field on the Project type may look like: + +```ts +export const Project = { + owner: (args, { root, context, info }) => { + return db.project.findUnique({ where: { id: root.id } }).user() + }, + // ... +} +``` + +:::info + +You can read more about resolvers in the dedicated [Understanding Default Resolvers](#understanding-default-resolvers) section below. 
+ +::: + +To summarize, when a GraphQL query reaches a GraphQL API, here's what happens: + +``` ++--------------------+ +--------------------+ +| | 1.send operation | | +| | | GraphQL Server | +| GraphQL Client +----------------->| | | +| | | | 2.resolve | +| | | | data | ++--------------------+ | v | + ^ | +----------------+ | + | | | | | + | | | Resolvers | | + | | | | | + | | +--------+-------+ | + | 3. respond JSON with data | | | + +-----------------------------+ <--------+ | + | | + +--------------------+ +``` + +In contrast to most GraphQL implementations, Redwood provides a "deconstructed" way of creating a GraphQL API: + +- You define your SDLs (schema) in `*.sdl.js` files, which define what queries and mutations are available, and what fields can be returned +- For each query or mutation, you write a service function with the same name. This is the resolver +- Redwood then takes all your SDLs and Services (resolvers), combines them into a GraphQL server, and expose it as an endpoint + +## RedwoodJS and GraphQL + +Besides taking care of the annoying stuff for you (namely, mapping your resolvers, which gets annoying fast if you do it yourself!), there's not many gotchas with GraphQL in Redwood. +The only Redwood-specific thing you should really be aware of is [resolver args](#redwoods-resolver-args). + +Since there's two parts to GraphQL in Redwood, the client and the server, we've divided this doc up that way. + +On the `web` side, Redwood uses [Apollo Client](https://www.apollographql.com/docs/react/) by default though you can swap it out for something else if you want. + + +The `api` side offers a GraphQL server built on [GraphQL Yoga](https://www.graphql-yoga.com) and the [Envelop plugin system](https://www.envelop.dev/docs) from [The Guild](https://the-guild.dev). +### + +Redwood's api side is "serverless first", meaning it's architected as functions which can be deployed on either serverless or traditional infrastructure, and Redwood's GraphQL endpoint is effectively "just another function" (with a whole lot more going on under the hood, but that part is handled for you, out of the box). +One of the tenets of the Redwood philosophy is "Redwood believes that, as much as possible, you should be able to operate in a serverless mindset and deploy to a generic computational grid.” + +### GraphQL Yoga and the Generic Computation Grid + +To be able to deploy to a “generic computation grid” means that, as a developer, you should be able to deploy using the provider or technology of your choosing. You should be able to deploy to Netlify, Vercel, Fly, Render, AWS Serverless, or elsewhere with ease and no vendor or platform lock in. You should be in control of the framework, what the response looks like, and how your clients consume it. + +The same should be true of your GraphQL Server. [GraphQL Yoga](https://www.graphql-yoga.com) from [The Guild](https://the-guild.dev) makes that possible. + +> The fully-featured GraphQL Server with focus on easy setup, performance and great developer experience. + +RedwoodJS leverages Yoga's Envelop plugins to implement custom internal plugins to help with [authentication](#authentication), [logging](#logging), [directive handling](#directives), and more. + +### Security Best Practices + + +RedwoodJS implements GraphQL Armor from [Escape Technologies](https://escape.tech) to make your endpoint more secure by default by implementing common GraphQL [security best practices](#security). 
+ +GraphQL Armor, developed by Escape in partnership with The Guild, is a middleware for JS servers that adds a security layer to the RedwoodJS GraphQL endpoint. + +### Trusted Documents + +In addition, RedwoodJS can be setup to enforce [persisted operations](https://the-guild.dev/graphql/yoga-server/docs/features/persisted-operations) -- alternatively called [Trusted Documents](https://benjie.dev/graphql/trusted-documents). + +See [Configure Trusted Documents](graphql/trusted-documents#configure-trusted-documents) for more information and usage instructions. + + +### Conclusion + +All this gets us closer to Redwood's goal of being able to deploy to a "generic computation grid". And that’s exciting! + +## Client-side + +### RedwoodApolloProvider + +By default, Redwood Apps come ready-to-query with the `RedwoodApolloProvider`. As you can tell from the name, this Provider wraps [ApolloProvider](https://www.apollographql.com/docs/react/api/react/hooks/#the-apolloprovider-component). Omitting a few things, this is what you'll normally see in Redwood Apps: + +```jsx title="web/src/App.js" +import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' + +// ... + +const App = () => ( + <RedwoodApolloProvider> + <Routes /> + </RedwoodApolloProvider> +) + +// ... +``` + +You can use Apollo's `useQuery` and `useMutation` hooks by importing them from `@redwoodjs/web`, though if you're using `useQuery`, we recommend that you use a [Cell](cells.md): + +```jsx title="web/src/components/MutateButton.js" +import { useMutation } from '@redwoodjs/web' + +const MUTATION = gql` + # your mutation... +` + +const MutateButton = () => { + const [mutate] = useMutation(MUTATION) + + return ( + <button onClick={() => mutate({ ... })}> + Click to mutate + </button> + ) +} +``` + +Note that you're free to use any of Apollo's other hooks, you'll just have to import them from `@apollo/client` instead. In particular, these two hooks might come in handy: + +| Hook | Description | +| :------------------------------------------------------------------------------------------- | :------------------------------------------------------------------- | +| [useLazyQuery](https://www.apollographql.com/docs/react/api/react/hooks/#uselazyquery) | Execute queries in response to events other than component rendering | +| [useApolloClient](https://www.apollographql.com/docs/react/api/react/hooks/#useapolloclient) | Access your instance of `ApolloClient` | + +### Customizing the Apollo Client and Cache + +By default, `RedwoodApolloProvider` configures an `ApolloClient` instance with 1) a default instance of `InMemoryCache` to cache responses from the GraphQL API and 2) an `authMiddleware` to sign API requests for use with [Redwood's built-in auth](authentication.md). Beyond the `cache` and `link` params, which are used to set up that functionality, you can specify additional params to be passed to `ApolloClient` using the `graphQLClientConfig` prop. The full list of available configuration options for the client are [documented here on Apollo's site](https://www.apollographql.com/docs/react/api/core/ApolloClient/#options). + +Depending on your use case, you may want to configure `InMemoryCache`. For example, you may need to specify a type policy to change the key by which a model is cached or to enable pagination on a query. [This article from Apollo](https://www.apollographql.com/docs/react/caching/cache-configuration/) explains in further detail why and how you might want to do this. 
+ +To configure the cache when it's created, use the `cacheConfig` property on `graphQLClientConfig`. Any value you pass is passed directly to `InMemoryCache` when it's created. + +For example, if you have a query named `search` that supports [Apollo's offset pagination](https://www.apollographql.com/docs/react/pagination/core-api/), you could enable it by specifying: + +```jsx +<RedwoodApolloProvider graphQLClientConfig={{ + cacheConfig: { + typePolicies: { + Query: { + fields: { + search: { + // Uses the offsetLimitPagination preset from "@apollo/client/utilities"; + ...offsetLimitPagination() + } + } + } + } + } +}}> +``` + + + +### Swapping out the RedwoodApolloProvider + +As long as you're willing to do a bit of configuring yourself, you can swap out `RedwoodApolloProvider` with your GraphQL Client of choice. You'll just have to get to know a bit of the make up of the [RedwoodApolloProvider](https://github.com/redwoodjs/redwood/blob/main/packages/web/src/apollo/index.tsx#L71-L84); it's actually composed of a few more Providers and hooks: + +- `FetchConfigProvider` +- `useFetchConfig` +- `GraphQLHooksProvider` + +For an example of configuring your own GraphQL Client, see the [redwoodjs-react-query-provider](https://www.npmjs.com/package/redwoodjs-react-query-provider). If you were thinking about using [react-query](https://react-query.tanstack.com/), you can also just go ahead and install it! + +Note that if you don't import `RedwoodApolloProvider`, it won't be included in your bundle, dropping your bundle size quite a lot! + +## Server-side + +### Understanding Default Resolvers + +According to the spec, for every field in your sdl, there has to be a resolver in your Services. But you'll usually see fewer resolvers in your Services than you technically should. And that's because if you don't define a resolver, GraphQL Yoga server will. + +The key question the Yoga server asks is: "Does the parent argument (in Redwood apps, the `parent` argument is named `root`—see [Redwood's Resolver Args](#redwoods-resolver-args)) have a property with this resolver's exact name?" Most of the time, especially with Prisma Client's ergonomic returns, the answer is yes. + +Let's walk through an example. Say our sdl looks like this: + +```jsx title="api/src/graphql/user.sdl.js" +export const schema = gql` + type User { + id: Int! + email: String! + name: String + } + + type Query { + users: [User!]! + } +` +``` + +So we have a User model in our `schema.prisma` that looks like this: + +```jsx +model User { + id Int @id @default(autoincrement()) + email String @unique + name String? +} +``` + +If you create your Services for this model using Redwood's generator (`yarn rw g service user`), your Services will look like this: + +```jsx title="api/src/services/user/user.js" +import { db } from 'src/lib/db' + +export const users = () => { + return db.user.findMany() +} +``` + +Which begs the question: where are the resolvers for the User fields—`id`, `email`, and `name`? +All we have is the resolver for the Query field, `users`. + +As we just mentioned, GraphQL Yoga defines them for you. And since the `root` argument for `id`, `email`, and `name` has a property with each resolvers' exact name (i.e. `root.id`, `root.email`, `root.name`), it'll return the property's value (instead of returning `undefined`, which is what Yoga would do if that weren't the case). 
+ +But, if you wanted to be explicit about it, this is what it would look like: + +```jsx title="api/src/services/user/user.js" +import { db } from 'src/lib/db' + +export const users = () => { + return db.user.findMany() +} + +export const Users = { + id: (_args, { root }) => root.id, + email: (_args, { root }) => root.email, + name: (_args, { root }) => root.name, +} +``` + +The terminological way of saying this is, to create a resolver for a field on a type, in the Service, export an object with the same name as the type that has a property with the same name as the field. + +Sometimes you want to do this since you can do things like add completely custom fields this way: + +```jsx {5} +export const Users = { + id: (_args, { root }) => root.id, + email: (_args, { root }) => root.email, + name: (_args, { root }) => root.name, + age: (_args, { root }) => new Date().getFullYear() - root.birthDate.getFullYear() +} +``` + +<!-- Source: https://community.redwoodjs.com/t/how-to-create-field-resolver/195/7 --> + +### Redwood's Resolver Args + +[According to the spec](https://graphql.org/learn/execution/#root-fields-resolvers), resolvers take four arguments: `args`, `obj`, `context`, and `info`. In Redwood, resolvers do take these four arguments, but what they're named and how they're passed to resolvers is slightly different: + +- `args` is passed as the first argument +- `obj` is named `root` (all the rest keep their names) +- `root`, `context`, and `info` are wrapped into an object, `gqlArgs`; this object is passed as the second argument + +Here's an example to make things clear: + +```js +export const Post = { + user: (args, gqlArgs) => db.post.findUnique({ where: { id: gqlArgs?.root.id } }).user(), +} +``` + +Of the four, you'll see `args` and `root` being used a lot. + +| Argument | Description | +| :-------- | :------------------------------------------------------------------------------------------- | +| `args` | The arguments provided to the field in the GraphQL query | +| `root` | The previous return in the resolver chain | +| `context` | Holds important contextual information, like the currently logged in user | +| `info` | Holds field-specific information relevant to the current query as well as the schema details | + +> **There's so many terms!** +> +> Half the battle here is really just coming to terms. To keep your head from spinning, keep in mind that everybody tends to rename `obj` to something else: Redwood calls it `root`, GraphQL Yoga calls it `parent`. `obj` isn't exactly the most descriptive name in the world. + +### Context + +In Redwood, the `context` object that's passed to resolvers is actually available to all your Services, whether or not they're serving as resolvers. Just import it from `@redwoodjs/graphql-server`: + +```jsx +import { context } from '@redwoodjs/graphql-server' +``` + +#### How to Modify the Context + +Because the context is read-only in your services, if you need to modify it, then you need to do so in the `createGraphQLHandler`. + +To populate or enrich the context on a per-request basis with additional attributes, set the `context` attribute `createGraphQLHandler` to a custom ContextFunction that modifies the context. + +For example, if we want to populate a new, custom `ipAddress` attribute on the context with the information from the request's event, declare the `setIpAddress` ContextFunction as seen here: + +```jsx title="api/src/functions/graphql.js" +// ... 
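+// `setIpAddress` (defined below) is the custom ContextFunction: it runs on every request +// and adds an `ipAddress` attribute to the context before your services use it.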
+ +const ipAddress = ({ event }) => { + return event?.headers?.['client-ip'] || event?.requestContext?.identity?.sourceIp || 'localhost' +} + +const setIpAddress = async ({ event, context }) => { + context.ipAddress = ipAddress({ event }) +} + +export const handler = createGraphQLHandler({ + getCurrentUser, + loggerConfig: { + logger, + options: { operationName: true, tracing: true }, + }, + schema: makeMergedSchema({ + schemas, + services, + }), + context: setIpAddress, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +> **Note:** If you use the preview GraphQL Yoga/Envelop `graphql-server` package and a custom ContextFunction to modify the context in the createGraphQL handler, the function is provided **_only the context_** and **_not the event_**. However, the `event` information is available as an attribute of the context as `context.event`. Therefore, in the above example, one would fetch the ip address from the event this way: `ipAddress({ event: context.event })`. + +### The Root Schema + +Did you know that you can query `redwood`? Try it in the GraphQL Playground (you can find the GraphQL Playground at http://localhost:8911/graphql when your dev server is running—`yarn rw dev api`): + +```graphql +query { + redwood { + version + currentUser + } +} +``` + +How is this possible? Via Redwood's [root schema](https://github.com/redwoodjs/redwood/blob/main/packages/graphql-server/src/rootSchema.ts). The root schema is where things like currentUser are defined: + +```graphql + scalar BigInt + scalar Date + scalar Time + scalar DateTime + scalar JSON + scalar JSONObject + + type Redwood { + version: String + currentUser: JSON + prismaVersion: String + } + + type Query { + redwood: Redwood + } +``` + +Now that you've seen the sdl, be sure to check out [the resolvers](https://github.com/redwoodjs/redwood/blob/main/packages/graphql-server/src/rootSchema.ts): + +```ts +export const resolvers: Resolvers = { + BigInt: BigIntResolver, + Date: DateResolver, + Time: TimeResolver, + DateTime: DateTimeResolver, + JSON: JSONResolver, + JSONObject: JSONObjectResolver, + Query: { + redwood: () => ({ + version: redwoodVersion, + prismaVersion: prismaVersion, + currentUser: (_args: any, context: GlobalContext) => { + return context?.currentUser + }, + }), + }, +} +``` + +<!-- ### The query workflow + +The GraphQL Playground's nice, but if you're a power user, you'll want to be using something a little more dedicated and always on; where you can save things like environments... + +<div class="relative pb-9/16"> + <iframe class="absolute inset-0 w-full h-full" src="https://www.youtube.com/watch?v=SU4g9_K0H1c" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture; modestbranding; showinfo=0; fullscreen"></iframe> +</div> + +- todo +- link to claire's video +- dt has some thoughts on this +- insomnia --> + +## CORS Configuration + +CORS stands for [Cross Origin Resource Sharing](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing); in a nutshell, by default, browsers aren't allowed to access resources outside their own domain. + +Let's say you're hosting each of your Redwood app's sides on different domains: the web side on `www.example.com` and the api side (and thus, the GraphQL Server) on `api.example.com`. +When the browser tries to fetch data from the `/graphql` function, you'll see an error that says the request was blocked due to CORS. 
Wording may vary, but it'll be similar to: + +> ⛔️ Access to fetch ... has been blocked by CORS policy: Response to preflight request doesn't pass access control check: No 'Access-Control-Allow-Origin' header is present on the requested resource. + +To fix this, you need to "configure CORS" by adding: + +``` +'Access-Control-Allow-Origin': 'https://example.com' +'Access-Control-Allow-Credentials': true +``` + +to the GraphQL response headers which you can do this by setting the `cors` option in `api/src/functions/graphql.{js|t}s`: + +```tsx +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + cors: { + // 👈 setup your CORS configuration options + origin: '*', + credentials: true, + }, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +For more in-depth discussion and configuration of CORS when it comes to using a cookie-based auth system (like [dbAuth](authentication.md#self-hosted-auth-installation-and-setup)), see the [CORS documentation](cors.md). + +## Health Checks + +You can use health checks to determine if a server is available and ready to start serving traffic. +For example, services like [Pingdom](https://www.pingdom.com) use health checks to determine server uptime and will notify you if it becomes unavailable. + +Redwood's GraphQL server provides a health check endpoint at `/graphql/health` as part of its GraphQL handler. +If the server is healthy and can accept requests, the response will contain the following headers: + +``` +content-type: application/json +server: GraphQL Yoga +x-yoga-id: yoga +``` + +and will return a `HTTP/1.1 200 OK` status with the body: + +```json +{ + "message": "alive" +} +``` + +Note the `x-yoga-id` header. The header's value defaults to `yoga` when `healthCheckId` isn't set in `createGraphQLHandler`. But you can customize it when configuring your GraphQL handler: + +```ts title="api/src/functions/graphql.ts" +// ... + +export const handler = createGraphQLHandler({ + // This will be the value of the `x-yoga-id` header + // highlight-next-line + healthCheckId: 'my-redwood-graphql-server', + getCurrentUser, + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +If the health check fails, then the GraphQL server is unavailable and you should investigate what could be causing the downtime. + +#### Perform a Health Check + +To perform a health check, make a HTTP GET request to the `/graphql/health` endpoint. + +For local development, +with the proxy using `curl` from the command line: + +```bash +curl "http://localhost:8910/.redwood/functions/graphql/health" -i +``` + +or by directly invoking the graphql function: + +```bash +curl "http://localhost:8911/graphql/health" -i +``` + +you should get the response: + +```json +{ + "message": "alive" +} +``` + +For production, make a request wherever your `/graphql` function exists. + +> These examples use `curl` but you can perform a health check via any HTTP GET request. + +#### Perform a Readiness Check + +A readiness check confirms that your GraphQL server can accept requests and serve **your server's** traffic. + +It forwards a request to the health check with a header that must match your `healthCheckId` in order to succeed. +If the `healthCheckId` doesn't match or the request fails, then your GraphQL server isn't "ready". 
+ +To perform a readiness check, make an HTTP GET request to the `/graphql/readiness` endpoint with the appropriate `healthCheckId` header. +For local development, you can make a request to the proxy: + +```bash +curl "http://localhost:8910/.redwood/functions/graphql/readiness" \ + -H 'x-yoga-id: yoga' \ + -i +``` + +or directly invoke the graphql function: + +```bash +curl "http://localhost:8911/graphql/readiness" \ + -H 'x-yoga-id: yoga' \ + -i +``` + +Either way, you should get a `200 OK` HTTP status if ready, or a `503 Service Unavailable` if not. + +For production, make a request wherever your `/graphql` function exists. + +> These examples use `curl` but you can perform a readiness check via any HTTP GET request with the proper headers. + +## Verifying GraphQL Schema + +In order to keep your GraphQL endpoint and services secure, you must specify one of `@requireAuth`, `@skipAuth` or a custom directive on **every** query and mutation defined in your SDL. + +Redwood verifies that your schema complies with these rules when: + +- building (or building just the api) +- launching the dev server. + +If any queries or mutations fail this check, you will see: + +- each query or mutation listed in the command's error log +- a fatal error `⚠️ GraphQL server crashed` if launching the server + +### Build-time Verification + +When building via the `yarn rw build` command, if the SDL fails verification you will see output that lists each query or mutation missing the directive: + +```bash + ✔ Generating Prisma Client... + ✖ Verifying graphql schema... + → - deletePost Mutation + Building API... + Cleaning Web... + Building Web... + Prerendering Web... + +You must specify one of @requireAuth, @skipAuth or a custom directive for +- contacts Query +- posts Query +- post Query +- createContact Mutation +- createPost Mutation +- updatePost Mutation +- deletePost Mutation +``` + +### Dev Server Verification + +When launching the dev server via the `yarn rw dev` command, you will see output that lists each query or mutation missing the directive: + +```bash + +gen | Generating TypeScript definitions and GraphQL schemas... +gen | 37 files generated +api | Building... Took 444 ms +api | Starting API Server... Took 2 ms +api | Listening on http://localhost:8911/ +api | Importing Server Functions... +web | ... +api | FATAL [2021-09-24 18:41:49.700 +0000]: +api | ⚠️ GraphQL server crashed +api | +api | Error: You must specify one of @requireAuth, @skipAuth or a custom directive for +api | - contacts Query +api | - posts Query +api | - post Query +api | - createContact Mutation +api | - createPost Mutation +api | - updatePost Mutation +api | - deletePost Mutation +``` + +To fix these errors, simply add `@requireAuth` to enforce authentication or `@skipAuth` to keep the operation public on each query or mutation, as appropriate for your app's permission needs. + +## Custom Scalars + +GraphQL scalar types give data meaning and validate that their values make sense. Out of the box, GraphQL comes with `Int`, `Float`, `String`, `Boolean` and `ID`. While those can cover a wide variety of use cases, you may need more specific scalar types to better describe and validate your application's data. + +For example, if there's a `Person` type in your schema with a field like `ageInYears` that's supposed to represent a person's age, technically it should only be a positive integer—never a negative one.
+Something like the [`PositiveInt` scalar](https://www.graphql-scalars.dev/docs/scalars/positive-int) provides that meaning and validation. + +### Scalars vs Service vs Directives + +How are custom scalars different from Service Validations or Validator Directives? + +[Service validations](services.md#service-validations) run when resolving the service. Because they run at the start of your Service function and throw if conditions aren't met, they're great for validating whenever you use a Service—anywhere, anytime. +For example, they'll validate via GraphQL, Serverless Functions, webhooks, etc. Custom scalars, however, only validate via GraphQL and not anywhere else. + +Service validations also perform more fine-grained checks than scalars which are more geared toward validating that data is of a specific **type**. + +[Validator Directives](#directives) control user **access** to data and also whether or not a user is authorized to perform certain queries and/or mutations. + +### How To Add a Custom Scalar + +Let's say that you have a `Product` type that has three fields: a name, a description, and the type of currency. +The built-in `String` scalar should suffice for the first two, but for the third, you'd be better off with a more-specific `String` scalar that only accepts [ISO 4217](https://en.wikipedia.org/wiki/ISO_4217) currency codes, like `USD`, `EUR`, `CAD`, etc. +Luckily there's already a [`Currency` scalar type](https://github.com/Urigo/graphql-scalars/blob/master/src/scalars/Currency.ts) that does exactly that! +All you have to do is add it to your GraphQL schema. + +To add a custom scalar to your GraphQL schema: + +1. Add the scalar definition to one of your sdl files, such as `api/src/graphql/scalars.sdl.ts` + +> Note that you may have to create this file. Moreover, it's just a convention—custom scalar type definitions can be in any of your sdl files. + +```jsx title="api/src/graphql/scalars.sdl.ts" +export const schema = gql` + scalar Currency +` +``` + +<br /> + +2. Import the scalar's definition and resolver and pass them to your GraphQLHandler via the `schemaOptions` property: + +```tsx {10-13} title="api/src/functions/graphql.ts" +import { CurrencyDefinition, CurrencyResolver } from 'graphql-scalars' + +// ... + +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + schemaOptions: { + typeDefs: [CurrencyDefinition], + resolvers: { Currency: CurrencyResolver }, + }, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +<br /> + +3. Use the scalar in your types + +```tsx {6,18,24} +export const schema = gql` + type Product { + id: Int! + name: String! + description: String! + currency_iso_4217: Currency! // validate on query + createdAt: DateTime! + } + + type Query { + products: [Product!]! @requireAuth + product(id: Int!): Product @requireAuth + } + + input CreateProductInput { + name: String! + description: String! + currency_iso_4217: Currency! // validate on mutation + } + + input UpdateProductInput { + name: String + description: String + currency_iso_4217: Currency // validate on mutation + } + + type Mutation { + createProduct(input: CreateProductInput!): Product! @requireAuth + updateProduct(id: Int!, input: UpdateProductInput!): Product! @requireAuth + deleteProduct(id: Int!): Product! @requireAuth + } +` +``` + +## Directives + +Directives supercharge your GraphQL services. 
They add configuration to fields, types or operations and act like "middleware", letting you run reusable code during GraphQL execution to perform tasks like [authentication](#authentication), formatting, and more. + +You'll recognize a directive because it's preceded by the `@` character, e.g. `@myDirective`, and declared alongside a field: + +```tsx +type Bar { + name: String! @myDirective +} +``` + +or a Query or Mutation: + +```tsx +type Query { + bars: [Bar!]! @myDirective +} + +type Mutation { + createBar(input: CreateBarInput!): Bar! @myDirective +} +``` + +See the [Directives](directives) section for complete information on RedwoodJS Directives. + +## Fragments + +See [fragments](graphql/fragments.md) + +## Unions + +Unions are abstract GraphQL types that enable a schema field to return one of multiple object types. + +`union FavoriteTree = Redwood | Ginkgo | Oak` + +A field can have a union as its return type. + +```tsx +type Query { + searchTrees: [FavoriteTree] # This list can include Redwood, Ginkgo or Oak objects +} +``` + +All of a union's included types must be object types and do not need to share any fields. + +To query a union, you can take advantage of [inline fragments](https://graphql.org/learn/queries/#inline-fragments) to include subfields of multiple possible types. + +```tsx +query GetFavoriteTrees { + __typename # __typename is helpful when querying a field that returns one of multiple types + searchTrees { + ... on Redwood { + name + height + } + ... on Ginkgo { + name + medicalUse + } + ... on Oak { + name + acornType + } + } +} +``` + +Redwood will automatically detect your union types in your `sdl` files and resolve *which* of your union's types is being returned. If the returned object does not match any of the valid types, the associated operation will produce a GraphQL error. + +:::note + +In order to use Union types web-side with your Apollo GraphQL client, you will need to [generate possible types from fragments and union types](#generate-possible-types). + +::: + +### useCache + +Apollo Client stores the results of your GraphQL queries in a local, normalized, in-memory cache. This enables the client to respond almost immediately to queries for already-cached data, without even sending a network request. + +`useCache` is a custom hook that returns the cache object and some useful methods to interact with the cache: + +* [evict](#evict) +* [extract](#extract) +* [identify](#identify) +* [modify](#modify) +* [resetStore](#resetStore) +* [clearStore](#clearStore) + +```ts +import { useCache } from '@redwoodjs/web/apollo' +``` + +#### cache + +Returns the normalized, in-memory cache. + +```ts +import { useCache } from '@redwoodjs/web/apollo' + +const { cache } = useCache() +``` + +#### evict + +Either removes a normalized object from the cache or removes a specific field from a normalized object in the cache.
+ +```ts +import { useCache } from '@redwoodjs/web/apollo' + + +const Fruit = ({ id }: { id: FragmentIdentifier }) => { + const { evict } = useCache() + const { data: fruit, complete } = useRegisteredFragment<Fruit>(id) + + evict(fruit) +} +``` + +#### extract + +Returns a serialized representation of the cache's current contents + +```ts +import { useCache } from '@redwoodjs/web/apollo' + +const Fruit = ({ id }: { id: FragmentIdentifier }) => { + const { extract } = useCache() + + // Logs the cache's current contents + console.log(extract()) + +``` + +#### identify + +```ts +import { useCache } from '@redwoodjs/web/apollo' + +const Fruit = ({ id }: { id: FragmentIdentifier }) => { + const { identify } = useCache() + const { data: fruit, complete } = useRegisteredFragment<Fruit>(id) + + // Returns "Fruit:ownpc6co8a1w5bhfmavecko9" + console.log(identify(fruit)) +} +``` + +#### modify + +Modifies one or more field values of a cached object. Must provide a modifier function for each field to modify. A modifier function takes a cached field's current value and returns the value that should replace it. + +Returns true if the cache was modified successfully and false otherwise. + +```ts +import { useCache } from '@redwoodjs/web/apollo' + +const Fruit = ({ id }: { id: FragmentIdentifier }) => { + const { modify } = useCache() + const { data: fruit, complete } = useRegisteredFragment<Fruit>(id) + + // Modify the name of a given fruit entity to be uppercase + + <button onClick={() => modify(fruit, { + name(cachedName) { + return cachedName.toUpperCase() + }})}> + Uppercase {fruit.name} + </button> + + // ... +} +``` + +#### clearStore + +To reset the cache without refetching active queries, use the clearStore method. + + +```ts +import { useCache } from '@redwoodjs/web/apollo' + +const Fruit = ({ id }: { id: FragmentIdentifier }) => { + const { clearStore } = useCache() + + clearStore() +} +``` + +#### resetStore + +Reset the cache entirely, such as when a user logs out. + +```ts +import { useCache } from '@redwoodjs/web/apollo' + +const Fruit = ({ id }: { id: FragmentIdentifier }) => { + const { resetStore } = useCache() + + resetStore() +} +``` + +## GraphQL Handler Setup + +Redwood's `GraphQLHandlerOptions` allows you to configure your GraphQL handler schema, context, authentication, security and more. + +```ts +export interface GraphQLHandlerOptions { + /** + * @description The identifier used in the GraphQL health check response. + * It verifies readiness when sent as a header in the readiness check request. + * + * By default, the identifier is `yoga` as seen in the HTTP response header `x-yoga-id: yoga` + */ + healthCheckId?: string + + /** + * @description Customize GraphQL Logger + * + * Collect resolver timings, and exposes trace data for + * an individual request under extensions as part of the GraphQL response. + */ + loggerConfig: LoggerConfig + + /** + * @description Modify the resolver and global context. + */ + context?: Context | ContextFunction + + /** + * @description An async function that maps the auth token retrieved from the + * request headers to an object. + * Is it executed when the `auth-provider` contains one of the supported + * providers. + */ + getCurrentUser?: GetCurrentUser + + /** + * @description A callback when an unhandled exception occurs. Use this to disconnect your prisma instance. 
+ */ + onException?: () => void + + /** + * @description Services passed from the glob import: + * import services from 'src/services\/**\/*.{js,ts}' + */ + services: ServicesGlobImports + + /** + * @description SDLs (schema definitions) passed from the glob import: + * import sdls from 'src/graphql\/**\/*.{js,ts}' + */ + sdls: SdlGlobImports + + /** + * @description Directives passed from the glob import: + * import directives from 'src/directives/**\/*.{js,ts}' + */ + directives?: DirectiveGlobImports + + /** + * @description A list of options passed to [makeExecutableSchema] + * (https://www.graphql-tools.com/docs/generate-schema/#makeexecutableschemaoptions). + */ + schemaOptions?: Partial<IExecutableSchemaDefinition> + + /** + * @description CORS configuration + */ + cors?: CorsConfig + + /** + * @description Customize GraphQL Armor plugin configuration + * + * @see https://escape-technologies.github.io/graphql-armor/docs/configuration/examples + */ + armorConfig?: ArmorConfig + + /** + * @description Customize the default error message used to mask errors. + * + * By default, the masked error message is "Something went wrong" + * + * @see https://github.com/dotansimha/envelop/blob/main/packages/core/docs/use-masked-errors.md + */ + defaultError?: string + + /** + * @description Only allows the specified operation types (e.g. subscription, query or mutation). + * + * By default, only allow query and mutation (ie, do not allow subscriptions). + * + * An array of GraphQL's OperationTypeNode enums: + * - OperationTypeNode.SUBSCRIPTION + * - OperationTypeNode.QUERY + * - OperationTypeNode.MUTATION + * + * @see https://github.com/dotansimha/envelop/tree/main/packages/plugins/filter-operation-type + */ + allowedOperations?: AllowedOperations + + /** + * @description Custom Envelop plugins + */ + extraPlugins?: Plugin[] + + /** + * @description Auth-provider specific token decoder + */ + authDecoder?: Decoder + + /** + * @description Customize the GraphiQL Endpoint that appears in the location bar of the GraphQL Playground + * + * Defaults to '/graphql' as this value must match the name of the `graphql` function on the api-side. + */ + graphiQLEndpoint?: string + /** + * @description Function that returns custom headers (as string) for GraphiQL. + * + * Headers must set auth-provider, Authorization and (if using dbAuth) the encrypted cookie. + */ + generateGraphiQLHeader?: GenerateGraphiQLHeader +} +``` + +### Directive Setup + +Redwood makes it easy to code, organize, and map your directives into the GraphQL schema. + +You simply add them to the `directives` directory and the `createGraphQLHandler` will do all the work. + +```tsx title="api/src/functions/graphql.ts" +import { createGraphQLHandler } from '@redwoodjs/graphql-server' + +import directives from 'src/directives/**/*.{js,ts}' // 👈 directives live here +import sdls from 'src/graphql/**/*.sdl.{js,ts}' +import services from 'src/services/**/*.{js,ts}' + +import { db } from 'src/lib/db' +import { logger } from 'src/lib/logger' + +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: {} }, + armorConfig, // 👈 custom GraphQL Security configuration + directives, // 👈 directives are added to the schema here + sdls, + services, + onException: () => { + // Disconnect from your database with an unhandled exception. 
+ db.$disconnect() + }, +}) +``` + +> Note: Check-out the [in-depth look at Redwood Directives](directives) that explains how to generate directives so you may use them to validate access and transform the response. + + +### Logging Setup + +For a details on setting up GraphQL Logging, see [Logging](#logging). +### Security Setup + +For a details on setting up GraphQL Security, see [Security](#security). +## Logging + +Logging is essential in production apps to be alerted about critical errors and to be able to respond effectively to support issues. In staging and development environments, logging helps you debug queries, resolvers and cell requests. + +We want to make logging simple when using RedwoodJS and therefore have configured the api-side GraphQL handler to log common information about your queries and mutations. Log statements also be optionally enriched with [operation names](https://graphql.org/learn/queries/#operation-name), user agents, request ids, and performance timings to give you more visibility into your GraphQL api. + +By configuring the GraphQL handler to use your api side [RedwoodJS logger](logger), any errors and other log statements about the [GraphQL execution](https://graphql.org/learn/execution/) will be logged to the [destination](logger#destination-aka-where-to-log) you've set up: to standard output, file, or transport stream. + +You configure the logger using the `loggerConfig` that accepts a [`logger`](logger) and a set of [GraphQL Logger Options](#graphql-logger-options). + +### Configure the GraphQL Logger + +A typical GraphQLHandler `graphql.ts` is as follows: + +```jsx title="api/src/functions/graphql.ts" +// ... + +import { logger } from 'src/lib/logger' + +// ... +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: {} }, + // ... +}) +``` + +#### Log Common Information + +The `loggerConfig` takes several options that logs meaningful information along the graphQL execution lifecycle. + +| Option | Description | +| :------------ | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| data | Include response data sent to client. | +| operationName | Include operation name. The operation name is a meaningful and explicit name for your operation. It is only required in multi-operation documents, but its use is encouraged because it is very helpful for debugging and server-side logging. When something goes wrong (you see errors either in your network logs, or in the logs of your GraphQL server) it is easier to identify a query in your codebase by name instead of trying to decipher the contents. Think of this just like a function name in your favorite programming language. See https://graphql.org/learn/queries/#operation-name | +| requestId | Include the event's requestId, or if none, generate a uuid as an identifier. | +| query | Include the query. This is the query or mutation (with fields) made in the request. | +| tracing | Include the tracing and timing information. 
This will log various performance timings within the GraphQL event lifecycle (parsing, validating, executing, etc). | +| userAgent | Include the browser (or client's) user agent. This can be helpful to know what type of client made the request to resolve issues when encountering errors or unexpected behavior. | + +Therefore, if you wish to log the GraphQL `query` made, the `data` returned, and the `operationName` used, you would + +```jsx title="api/src/functions/graphql.ts" +export const handler = createGraphQLHandler({ + loggerConfig: { + logger, + options: { data: true, operationName: true, query: true }, + }, + // ... +}) +``` + +#### Exclude Operations + +You can exclude GraphQL operations by name with `excludeOperations`. +This is useful when you want to filter out certain operations from the log output, for example, `IntrospectionQuery` from GraphQL playground: + +```jsx {5} title="api/src/functions/graphql.ts" +export const handler = createGraphQLHandler({ + loggerConfig: { + logger, + options: { excludeOperations: ['IntrospectionQuery'] }, + }, + directives, + sdls, + services, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +> **Relevant anatomy of an operation** +> +> In the example below, `"FilteredQuery"` is the operation's name. +> That's what you'd pass to `excludeOperations` if you wanted it filtered out. +> +> ```js +> export const filteredQuery = ` +> query FilteredQuery { +> me { +> id +> name +> } +> } +> ``` + +### Benefits of Logging + +Benefits of logging common GraphQL request information include debugging, profiling, and resolving issue reports. + +#### Operation Name Identifies Cells + +The [operation name](https://graphql.org/learn/queries/#operation-name) is a meaningful and explicit name for your operation. It is only required in multi-operation documents, but its use is encouraged because it is very helpful for debugging and server-side logging. + +Because your cell typically has a unique operation name, logging this can help you identify which cell made a request. + +```jsx title="api/src/functions/graphql.ts" +// ... +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: { operationName: true } }, +// ... +``` + +#### RequestId for Support Issue Resolution + +Often times, your deployment provider will provide a request identifier to help reconcile and track down problems at an infrastructure level. For example, AWS API Gateway and AWS Lambda (used by Netlify, for example) provides `requestId` on the `event`. + +You can include the request identifier setting the `requestId` logger option to `true`. + +```jsx title="api/src/functions/graphql.ts" +// ... +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: { requestId: true } }, +// ... +``` + +And then, when working to resolve a support issue with your deployment provider, you can supply this request id to help them track down and investigate the problem more easily. + +#### No Need to Log within Services + +By configuring your GraphQL logger to include `data` and `query` information about each request you can keep your service implementation clean, concise and free of repeated logger statements in every resolver -- and still log the useful debugging information. + +```jsx title="api/src/functions/graphql.ts" +// ... +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: { data: true, operationName: true, query: true } }, +// ... 
+ +// api/src/services/posts.js +//... +export const post = async ({ id }) => { + return await db.post.findUnique({ + where: { id }, + }) +} +//... +``` + +The GraphQL handler will then take care of logging your query and data -- as long as your logger is setup to log at the `info` [level](logger#log-level) and above. + +> You can also disable the statements in production by just logging at the `warn` [level](logger#log-level) or above + +This means that you can keep your services free of logger statements, but still see what's happening! + +```bash +api | POST /graphql 200 7.754 ms - 1772 +api | DEBUG [2021-09-29 16:04:09.313 +0000] (graphql-server): GraphQL execution started: BlogPostQuery +api | operationName: "BlogPostQuery" +api | query: { +api | "id": 3 +api | } +api | DEBUG [2021-09-29 16:04:09.321 +0000] (graphql-server): GraphQL execution completed: BlogPostQuery +api | data: { +api | "post": { +api | "id": 3, +api | "body": "Meh waistcoat succulents umami asymmetrical, hoodie post-ironic paleo chillwave tote bag. Trust fund kitsch waistcoat vape, cray offal gochujang food truck cloud bread enamel pin forage. Roof party chambray ugh occupy fam stumptown. Dreamcatcher tousled snackwave, typewriter lyft unicorn pabst portland blue bottle locavore squid PBR&B tattooed.", +api | "createdAt": "2021-09-24T16:51:06.198Z", +api | "__typename": "Post" +api | } +api | } +api | operationName: "BlogPostQuery" +api | query: { +api | "id": 3 +api | } +api | POST /graphql 200 9.386 ms - 441 +``` + +#### Send to Third-party Transports + +Stream to third-party log and application monitoring services vital to production logging in serverless environments like [logFlare](https://logflare.app/), [Datadog](https://www.datadoghq.com/) or [LogDNA](https://www.logdna.com/) + +#### Supports Log Redaction + +Everyone has heard of reports that Company X logged emails, or passwords to files or systems that may not have been secured. While RedwoodJS logging won't necessarily prevent that, it does provide you with the mechanism to ensure that won't happen. + +To redact sensitive information, you can supply paths to keys that hold sensitive data using the RedwoodJS logger [redact option](logger#redaction). + +Because this logger is used with the GraphQL handler, it will respect any redaction paths setup. + +For example, you have chosen to log `data` return by each request, then you may want to redact sensitive information, like email addresses from your logs. + +Here is an example of an application `/api/src/lib/logger.ts` configured to redact email addresses. Take note of the path `data.users[*].email` as this says, in the `data` attribute, redact the `email` from every `user`: + +```jsx title="/api/src/lib/logger.ts" +import { createLogger, redactionsList } from '@redwoodjs/api/logger' + +export const logger = createLogger({ + options: { + redact: [...redactionsList, 'email', 'data.users[*].email'], + }, +}) +``` + +#### Timing Traces and Metrics + +Often you want to measure and report how long your queries take to execute and respond. You may already be measuring these durations at the database level, but you can also measure the time it takes for your the GraphQL server to parse, validate, and execute the request. + +You may turn on logging these metrics via the `tracing` GraphQL configuration option. + +```jsx title="api/src/functions/graphql.ts" +// ... +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: { tracing: true } }, +// ... 
+``` + +Let's say we wanted to get some benchmark numbers for the "find post by id" resolver + +```jsx +return await db.post.findUnique({ + where: { id }, +}) +``` + +We see that this request took about 500 msecs (note: duration is reported in nanoseconds). + +For more details about the information logged and its format, see [Apollo Tracing](https://github.com/apollographql/apollo-tracing). + +```bash +pi | INFO [2021-07-09 14:25:52.452 +0000] (graphql-server): GraphQL willSendResponse +api | tracing: { +api | "version": 1, +api | "startTime": "2021-07-09T14:25:51.931Z", +api | "endTime": "2021-07-09T14:25:52.452Z", +api | "duration": 521131526, +api | "execution": { +api | "resolvers": [ +api | { +api | "path": [ +api | "post" +api | ], +api | "parentType": "Query", +api | "fieldName": "post", +api | "returnType": "Post!", +api | "startOffset": 1787428, +api | "duration": 519121497 +api | }, +api | { +api | "path": [ +api | "post", +api | "id" +api | ], +api | "parentType": "Post", +api | "fieldName": "id", +api | "returnType": "Int!", +api | "startOffset": 520982888, +api | "duration": 25140 +api | }, +... more paths follow ... +api | ] +api | } +api | } +``` + +By logging the operation name and extracting the duration for each query, you can easily collect and benchmark query performance. + +## Security + +Parsing a GraphQL operation document is a very expensive and compute intensive operation that blocks the JavaScript event loop. If an attacker sends a very complex operation document with slight variations over and over again he can easily degrade the performance of the GraphQL server. + +RedwoodJS will by default reject a variety malicious operation documents; that is, it'll prevent attackers from making malicious queries or mutations. + +RedwoodJS is configured out-of-the-box with GraphQL security best practices: + +* Schema Directive-based Authentication including RBAC validation +* Production Deploys disable Introspection and GraphQL Playground automatically +* Reject Malicious Operation Documents (Max Aliases, Max Cost, Max Depth, Max Directives, Max Tokens) +* Prevent Information Leaks (Block Field Suggestions, Mask Errors) + +And with the Yoga Envelop Plugin ecosystem available to you, there are options for: + +* CSRF Protection +* Rate Limiting +* and more. + +### Authentication + +By default, your GraphQL endpoint is open to the world. + +That means anyone can request any query and invoke any Mutation. +Whatever types and fields are defined in your SDL is data that anyone can access. + +Redwood [encourages being secure by default](directives) by defaulting all queries and mutations to have the `@requireAuth` directive when generating SDL or a service. + +When your app builds and your server starts up, Redwood checks that **all** queries and mutations have `@requireAuth`, `@skipAuth` or a custom directive applied. + +If not, then your build will fail: + +```bash + ✖ Verifying graphql schema... + Building API... + Cleaning Web... + Building Web... + Prerendering Web... +You must specify one of @requireAuth, @skipAuth or a custom directive for +- contacts Query +- posts Query +- post Query +- updatePost Mutation +- deletePost Mutation +``` + +or your server won't startup and you should see that "Schema validation failed": + +```bash +gen | Generating TypeScript definitions and GraphQL schemas... +gen | 47 files generated +api | Building... 
Took 593 ms +api | [GQL Server Error] - Schema validation failed +api | ---------------------------------------- +api | You must specify one of @requireAuth, @skipAuth or a custom directive for +api | - posts Query +api | - createPost Mutation +api | - updatePost Mutation +api | - deletePost Mutation +``` + +To correct, just add the appropriate directive to your queries and mutations. + +If not, then your build will fail and your server won't start up. + +#### @requireAuth + +To enforce authentication, simply add the `@requireAuth` directive in your GraphQL schema for any query or field you want protected. + +Here we enforce that a user must be logged in to `create`, `update` or `delete` a `Post`. + +```ts +type Post { + id: Int! + title: String! + body: String! + authorId: Int! + author: User! + createdAt: DateTime! +} + +input CreatePostInput { + title: String! + body: String! + authorId: Int! +} + +input UpdatePostInput { + title: String + body: String + authorId: Int +} + +type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth + deletePost(id: Int!): Post! @requireAuth +} +``` + +It's your responsibility to implement the `requireAuth()` function in your app's `api/src/lib/auth.{js|ts}` to check if the user is properly authenticated and/or has the expected role membership. + +The `@requireAuth` directive will call the `requireAuth()` function to determine if the user is authenticated or not. + +```ts title="api/src/lib/auth.ts" +// ... + +export const isAuthenticated = (): boolean => { + return true // 👈 replace with the appropriate check +} + +// ... + +export const requireAuth = ({ roles }: { roles: AllowedRoles }) => { + if (!isAuthenticated()) { + throw new AuthenticationError("You don't have permission to do that.") + } + + if (!hasRole({ roles })) { + throw new ForbiddenError("You don't have access to do that.") + } +} +``` + +> **Note**: The `auth.ts` file here is the stub for a new RedwoodJS app. Once you have set up auth with your provider, this will enforce a proper authentication check. + +##### Field-level Auth + +You can apply the `@requireAuth` to any field as well (not just queries or mutations): + +```ts +type Post { + id: Int! + title: String! + body: String! @requireAuth + authorId: Int! + author: User! + createdAt: DateTime! +} +``` + +##### Role-based Access Control + +The `@requireAuth` directive lets you define roles that are permitted to perform the operation: + +```ts +type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth(roles: ["AUTHOR", "EDITOR"]) + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth(roles: ["EDITOR"]) + deletePost(id: Int!): Post! @requireAuth(roles: ["ADMIN"]) +} +``` + +#### @skipAuth + +If, however, you want your query or mutation to be public, then simply use `@skipAuth`. + +In the example below, fetching all posts or a single post is allowed for all users, authenticated or not. + +```ts +type Post { + id: Int! + title: String! + body: String! + authorId: Int! + author: User! + createdAt: DateTime! +} + +type Query { + posts: [Post!]!
@skipAuth + post(id: Int!): Post @skipAuth +} +``` + +### Introspection and Playground Disabled in Production + +Because it is often useful to ask a GraphQL schema for information about what queries it supports, GraphQL allows us to do so using the [introspection](https://graphql.org/learn/introspection/) system. + +The [GraphQL Playground](https://www.graphql-yoga.com/docs/features/graphiql) is a way for you to interact with your schema and try out queries and mutations. It can show you the schema by inspecting it. You can find the GraphQL Playground at [http://localhost:8911/graphql](http://localhost:8911/graphql) when your dev server is running. + +> Because both introspection and the playground share possibly sensitive information about your data model, your data, and your queries and mutations, best practice for deploying a GraphQL server is to disable these in production. RedwoodJS, **by default, only enables introspection and the playground when running in development**, that is, when `process.env.NODE_ENV === 'development'`. + +However, there may be cases where you want to enable introspection as well as the GraphQL Playground. You can enable introspection by setting the `allowIntrospection` option to `true` and enable GraphiQL by setting `allowGraphiQL` to `true`. + +Here is an example of the `createGraphQLHandler` function with the `allowIntrospection` and `allowGraphiQL` options set to `true`: +```ts {8} +export const handler = createGraphQLHandler({ + authDecoder, + getCurrentUser, + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + allowIntrospection: true, // 👈 enable introspection in all environments + allowGraphiQL: true, // 👈 enable GraphiQL Playground in all environments + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +:::warning + +Enabling introspection in production may pose a security risk, as it allows users to access information about your schema, queries, and mutations. Use this option with caution and make sure to secure your GraphQL API properly. + +There may be cases where one wants to allow introspection, but not GraphiQL. + +Or, you may want to enable GraphiQL, but not allow introspection; for example, to try out known queries, but not to share the entire set of possible operations and types. + +::: + + +### GraphQL Armor Configuration + +[GraphQL Armor](https://escape.tech/graphql-armor/) is a middleware that adds a security layer to the RedwoodJS GraphQL endpoint, configured with sensible defaults. + +You don't have to configure anything to enforce protection against alias, cost, depth, directive, and token abuse in GraphQL operations, as well as to block field suggestions or revealing error messages that might leak sensitive information. + +But, if you need to enable, disable, or modify the default settings, GraphQL Armor is fully configurable in a per-plugin fashion. + +Simply define and provide a custom GraphQL Security configuration to your `createGraphQLHandler`: + +```ts +export const handler = createGraphQLHandler({ + authDecoder, + getCurrentUser, + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + armorConfig, // 👈 custom GraphQL Security configuration + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +For example, the default max query depth limit is 6.
To change that setting to 2 levels, simply provide the configuration to your handler: + +```ts +export const handler = createGraphQLHandler({ + authDecoder, + getCurrentUser, + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + armorConfig: { maxDepth: { n: 2 } }, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +#### Max Aliases + +This protection is enabled by default. + +Limit the number of aliases in a document. Defaults to 15. + +##### Example + +Aliases allow you to rename the data that is returned in a query’s results. They manipulate the structure of the query result that is fetched from your service, displaying it according to your web component's needs. + +This contrived example uses 11 aliases to rename a Post's id and title to various permutations of post, article, and blog to return a different shape in the query result as `articles`: + +```ts + { + articles: posts { + id + articleId: id + postId: id + articlePostId: id + postArticleId: id + blogId: id + title + articleTitle: title + postTitle: title + articlePostTitle: title + postArticleTitle: title + blogTitle: title + } +} +``` + +##### Configuration and Defaults + +Limit the number of aliases in a document. Defaults to 15. + +You can change the default value via the `maxAliases` setting when creating your GraphQL handler. + +```ts +{ + maxAliases: { + enabled: true, + n: 15, + } +} +``` +#### Cost Limit + +This protection is enabled by default. + +It analyzes incoming GraphQL queries and applies a cost analysis algorithm to prevent resource overload by blocking requests that are too expensive (DoS attack attempts). + +The cost computation is quite simple (and naive) at the moment but there are plans to make it evolve toward an extensive plugin with many features. + +Defaults to an overall maxCost limit of 5000. + +##### Overview + +Cost is a factor of the kind of field and depth. Total Cost is a cumulative sum of each field based on its type and its depth in the query. + +Scalar fields -- those that return values like strings or numbers -- are worth one value; whereas objects are worth another. + +How deep they are nested in the query is a multiplier factor such that: + +``` +COST = FIELD_KIND_COST * (DEPTH * DEPTH_COST_FACTOR) +TOTAL_COST = SUM(COST) +``` + +If the `TOTAL_COST` exceeds the `maxCost`, an error stops GraphQL execution and rejects the request. + +You have control over the field kind and depth costs settings, but the defaults are: + +``` +objectCost: 2, // cost of retrieving an object +scalarCost: 1, // cost of retrieving a scalar +depthCostFactor: 1.5, // multiplicative cost of depth +``` + +##### Example + +In this small example, we have one object field `me` that contains two nested scalar fields, `id` and `user`. There is an operation `profile` (which is neither a scalar nor an object and is thus ignored as part of the cost calculation). + +```ts +{ + profile { + me { + id + user + } + } +} +``` +The cost breakdown is: + +* two scalars `id` and `user` worth 1 each +* they are at level 1 depth with a depth factor of 1.5 +* 2 \* ( 1 \* 1.5 ) = 2 \* 1.5 = 3 +* their parent object is `me` worth 2 + +Therefore the total cost is 2 + 3 = 5. + +:::note +The operation definition `query` of `profile` is ignored in the calculation.
This is the case even if you name your query `MY_PROFILE` like: + +``` +{ + profile MY_PROFILE { + me { + id + user + } + } +} +``` +::: + +##### Configuration and Defaults + +Defaults to a overall maxCost limit of 5000. + +You can change the default value via the `costLimit` setting when creating your GraphQL handler. + + +```ts +{ + costLimit: { + enabled: true, + maxCost: 5000, // maximum cost of a request before it is rejected + objectCost: 2, // cost of retrieving an object + scalarCost: 1, // cost of retrieving a scalar + depthCostFactor: 1.5, // multiplicative cost of depth + } +} +``` + +#### Max Depth Limit + +This protection is enabled by default. + +Limit the depth of a document. Defaults to 6 levels. + +Attackers often submit expensive, nested queries to abuse query depth that could overload your database or expend costly resources. + +Typically, these types of unbounded, complex and expensive GraphQL queries are usually huge deeply nested and take advantage of an understanding of your schema (hence why schema introspection is disabled by default in production) and the data model relationships to create "cyclical" queries. + +##### Example + +An example of a cyclical query here takes advantage of knowing that an author has posts and each post has an author ... that has posts ... that has an another that ... etc. + +This cyclical query has a depth of 8. + +```jsx +// cyclical query example +// depth: 8+ +query cyclical { + author(id: 'jules-verne') { + posts { + author { + posts { + author { + posts { + author { + ... { + ... # more deep nesting! + } + } + } + } + } + } + } + } +} +``` +##### Configuration and Defaults + +Defaults to 6 levels. + +You can change the default value via the `maxDepth` setting when creating your GraphQL handler. + +```ts +{ + maxDepth: { + enabled: true, + n: 6, + } +} +``` + +#### Max Directives + +This protections is enabled by default. + +Limit the number of directives in a document. Defaults to 50. + +##### Example + +The following example demonstrates that by using the `@include` and `@skip` GraphQL query directives one can design a large request that requires computation, but in fact returns the expected response ... + +```ts +{ + posts { + id @include(if:true) + id @include(if:false) + id @include(if:false) + id @skip(if:true) + id @skip(if:true) + id @skip(if:true)) + title @include(if:true) + title @include(if:false) + title @include(if:false) + title @skip(if:true) + title @skip(if:true) + title @skip(if:true) + } +} +``` + +... of formatted Posts with just a single id and title. + +```ts +{ + "data": { + "posts": [ + { + "id": 1, + "title": "A little more about RedwoodJS" + }, + { + "id": 2, + "title": "What is GraphQL?" + }, + { + "id": 3, + "title": "Welcome to the RedwoodJS Community!" + }, + { + "id": 4, + "title": "10 ways to secure your GraphQL endpoint" + } + ] + } +} +``` + +By limiting the maximum number of directives in the document, malicious queries can be rejected. + +##### Configuration and Defaults + +You can change the default value via the `maxDirectives` setting when creating your GraphQL handler. + +```ts +{ + maxDirectives: { + enabled: true, + n: 50, + } +} +``` +#### Max Tokens + +This protection is enabled by default. + +Limit the number of GraphQL tokens in a document. + + In computer science, lexical analysis, lexing or tokenization is the process of converting a sequence of characters into a sequence of lexical tokens. + + E.g. given the following GraphQL operation. 
+ +```ts + query { + me { + id + user + } + } +``` + + The tokens are `query`, `{`, `me`, `{`, `id`, `user`, `}` and `}`, for a total count of 8 tokens. + +##### Example + +Given the query with 8 tokens: + +```ts + query { + me { + id + user + } + } +``` + +And a custom configuration to allow a maximum of two tokens: + +``` +const armorConfig = { + maxTokens: { n: 2 }, +} +``` + +An error is raised: + +``` +'Syntax Error: Token limit of 2 exceeded, found 3.' +``` + +:::note + +When reporting the number of found tokens, the number reported is not the total number of tokens in the document, but the count at the point the limit was exceeded. + +Therefore, the found value will be n + 1. +::: + +##### Configuration and Defaults + +Defaults to 1000. + +You can change the default value via the `maxTokens` setting when creating your GraphQL handler. + +```ts +{ + maxTokens: { + enabled: true, + n: 1000, + } +} +``` +#### Block Field Suggestions + +This plugin is enabled by default. + +It will prevent suggesting fields in case of an erroneous request. Suggestions can leak your schema even with introspection disabled, which can be very detrimental in the case of a private API. + +##### Example + +Example of such a suggestion: + +`Cannot query field "sta" on type "Media". Did you mean "stats", "staff", or "status"?` + +##### Configuration and Defaults + +Enabled by default. + +You can change the default value via the `blockFieldSuggestion` setting when creating your GraphQL handler. + +```ts +{ + blockFieldSuggestion: { + enabled: true, + } +} +``` +Enabling will hide the field suggestion: + +`Cannot query field "sta" on type "Media". [Suggestion hidden]?` + +Or, if you want a custom mask: + +```ts +{ + blockFieldSuggestion: { + mask: '<REDACTED>' + }, +} +``` + +`Cannot query field "sta" on type "Media". [REDACTED]?` + + +### Error Masking + +In many GraphQL servers, when an error is thrown, the details of that error are leaked to the outside world. The error and its message are then returned in the response and a client might reveal those errors in logs or even render the message to the user. You could potentially leak sensitive or other information about your app you don't want to share—such as database connection failures or even the presence of certain fields. + +Redwood is here to help! + +Redwood prevents leaking sensitive error-stack information out-of-the-box for unexpected errors. +If an error that isn't one of [Redwood's GraphQL Errors](#redwood-errors) or isn't based on a GraphQLError is thrown: + +- The original error and its message will be logged using the defined GraphQL logger, so you'll know what went wrong +- A default message "Something went wrong" will replace the error message in the response (Note: you can customize this message) + +#### Customizing the Error Message + +But what if you still want to share an error message with the client? +Simply use one of [Redwood's GraphQL Errors](#redwood-errors) and your custom message will be shared with your users. + +#### Customizing the Default Error Message + +You can customize the default "Something went wrong" message used when the error is masked via the `defaultError` setting on the `createGraphQLHandler`: + +```tsx +export const handler = createGraphQLHandler({ + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + defaultError: 'Sorry about that', // 👈 Customize the error message + onException: () => { + // Disconnect from your database with an unhandled exception.
+ db.$disconnect() + }, +}) +``` + +#### Redwood Errors + +Redwood Errors are inspired by [Apollo Server Error codes](https://www.apollographql.com/docs/apollo-server/data/errors/#error-codes) for common use cases: + +To use a Redwood Error, import each from `@redwoodjs/graphql-server`. + +- `SyntaxError` - An unspecified error occurred +- `ValidationError` - Invalid input to a service +- `AuthenticationError` - Failed to authenticate +- `ForbiddenError` - Unauthorized to access +- `UserInputError` - Missing input to a service + +If you use one of the errors, then the message provided will not be masked and will be shared in the GraphQL response: + +```tsx +import { UserInputError } from '@redwoodjs/graphql-server' +// ... +throw new UserInputError('An email is required.') +``` + +##### Custom Errors and Uses + +Need your own custom error and message? + +Maybe you're integrating with a third-party API and want to handle errors from that service and also want control of how that error is shared with your user client-side. + +Simply extend from `RedwoodError` and you're all set! + +```tsx +export class MyCustomError extends RedwoodError { + constructor(message: string, extensions?: Record<string, any>) { + super(message, extensions) + } +} +``` + +For example, in your service, you can create and use it to handle the error and return a friendly message: + +```tsx +export class WeatherError extends RedwoodError { + constructor(message: string, extensions?: Record<string, any>) { + super(message, extensions) + } +} + +export const getWeather = async ({ input }: WeatherInput) => { + try { + const weather = await weatherClient.get(input.zipCode) + return weather + } catch (error) { + // rate limit issue + if (error.statusCode === 429) { + throw new WeatherError('Unable to get the latest weather updates at the moment. Please try again shortly.') + } + + // other error + throw new WeatherError(`We could not get the weather for ${input.zipCode}.`) + } +} +``` + +#### CSRF Prevention + +If you have CORS enabled, almost all requests coming from the browser will have a preflight request - however, some requests are deemed "simple" and don't make a preflight. One example of such a request is a good ol' GET request without any custom headers; such a request is marked as "simple" and skips the preflight CORS check entirely. + +This attack can be mitigated by saying: "all GET requests must have a custom header set". This would force all clients to manipulate the headers of GET requests, marking them as "_not-_simple" and therefore always executing a preflight request. + +You can achieve this by using the [`@graphql-yoga/plugin-csrf-prevention` GraphQL Yoga plugin](https://the-guild.dev/graphql/yoga-server/docs/features/csrf-prevention). + +## Self-Documenting GraphQL API + +RedwoodJS helps you document your GraphQL API by generating commented SDL used for GraphiQL and the GraphQL Playground explorer -- which can also be turned into API docs using tools like [Docusaurus](#use-in-docusaurus). + +If you specify the SDL generator with its `--docs` option, any comments (which the [GraphQL spec](https://spec.graphql.org/October2021/#sec-Descriptions) calls "descriptions") will be incorporated into your RedwoodJS app's `schema.graphql` file when generating types. + +If you comment your Prisma schema models, their fields, or enums, the SDL generator will use those comments as the documentation.
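+ +For example, assuming your Prisma schema has a `Post` model (the model name here is just a placeholder), generating documented SDL for it might look like: + +```terminal +yarn rw g sdl Post --docs +```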
+ +If there is no Prisma comment, then the SDL generator will generate a default comment that you can then edit. + +:::note +If you re-generate the SDL, any custom comments will be overwritten. +However, if you make those edits in your Prisma schema, then those will be used. +::: + +### Prisma Schema Comments + +Your Prisma schema is documented with triple-slash comments (`///`) that precede: + +* Model names +* Enum names +* Each model field name + +``` +/// A blog post. +model Post { + /// The unique identifier of a post. + id Int @id @default(autoincrement()) + /// The title of a post. + title String + /// The content of a post. + body String + /// When the post was created. + createdAt DateTime @default(now()) +} + +/// A list of allowed colors. +enum Color { + RED + GREEN + BLUE +} +``` + +### SDL Comments + +When used with the `--docs` option, the [SDL generator](cli-commands#generate-sdl) adds comments for: + +* Directives +* Queries +* Mutations +* Input Types + +:::note +By default, the `--docs` option to the SDL generator is false and comments are not created. +::: + +Comments enclosed in `"""` or `"` (what the [GraphQL spec](https://spec.graphql.org/October2021/#sec-Descriptions) calls "descriptions") in your SDL files will be included in the generated GraphQL schema at the root of your project (`.redwood/schema.graphql`). + +``` +""" +Use to check whether or not a user is authenticated and is associated +with an optional set of roles. +""" +directive @requireAuth(roles: [String]) on FIELD_DEFINITION + +"""Use to skip authentication checks and allow public access.""" +directive @skipAuth on FIELD_DEFINITION + +""" +Autogenerated input type of InputPost. +""" +input CreatePostInput { + "The content of a post." + body: String! + + "The title of a post." + title: String! +} + +""" +Autogenerated input type of UpdatePost. +""" +input UpdatePostInput { + "The content of a post." + body: String + + "The title of a post." + title: String +} + +""" +A blog post. +""" +type Post { + "The content of a post." + body: String! + + "Description for createdAt." + createdAt: DateTime! + + "The unique identifier of a post." + id: Int! + + "The title of a post." + title: String! +} + +""" +About mutations +""" +type Mutation { + "Creates a new Post." + createPost(input: CreatePostInput!): Post! + + "Deletes an existing Post." + deletePost(id: Int!): Post! + + "Updates an existing Post." + updatePost(id: Int!, input: UpdatePostInput!): Post! +} + +""" +About queries +""" +type Query { + "Fetch a Post by id." + post(id: Int!): Post + + "Fetch Posts." + posts: [Post!]! +} +``` + +#### Root Schema + +Documentation is also generated for the Redwood Root Schema that defines details about Redwood such as the current user and version information. +``` +type Query { + "Fetches the Redwood root schema." + redwood: Redwood +} + +""" +The Redwood Root Schema + +Defines details about Redwood such as the current user and version information. +""" +type Redwood { + "The current user." + currentUser: JSON + + "The version of Prisma." + prismaVersion: String + + "The version of Redwood." + version: String +} + +scalar BigInt +scalar Date +scalar DateTime +scalar JSON +scalar JSONObject +scalar Time + +``` + +### Preview in GraphiQL + +The [GraphQL Playground aka GraphiQL](https://www.graphql-yoga.com/docs/features/graphiql) is a way for you to interact with your schema and try out queries and mutations. It can show you the schema by inspecting it.
You can find the GraphQL Playground at [http://localhost:8911/graphql](http://localhost:8911/graphql) when your dev server is running. + +The documentation generated is present when exploring the schema. + +#### Queries + +<img alt="graphiql-queries" src="/img/graphql-api-docs/graphiql-queries.png" width="400" /> + +#### Mutations + +<img alt="graphiql-mutations" src="/img/graphql-api-docs/graphiql-mutations.png" width="400" /> + +#### Model Types + +<img alt="graphiql-type" src="/img/graphql-api-docs/graphiql-type.png" width="400" /> + +#### Input Types + +<img alt="graphiql-input-type" src="/img/graphql-api-docs/graphiql-input-type.png" width="400" /> + +### Use in Docusaurus + +If your project uses [Docusaurus](https://docusaurus.io), the generated commented SDL can be used to publish documentation using the [graphql-markdown](https://graphql-markdown.github.io) plugin. + +#### Basic Setup + +The following is some basic setup information, but please consult [Docusaurus](https://docusaurus.io) and the [graphql-markdown](https://graphql-markdown.github.io) for latest instructions. + +1. Install Docusaurus (if you have not done so already) + +```terminal +npx create-docusaurus@latest docs classic +``` + + +Add `docs` to your `workspaces` in the project's `package.json`: + +``` + "workspaces": { + "packages": [ + "docs", + "api", + "web", + "packages/*" + ] + }, +``` + +2. Ensure a `docs` directory exists at the root of your project + +```terminal +mkdir docs // if needed +``` + +3. Install the GraphQL Generators Plugin + +```terminal +yarn workspace docs add @edno/docusaurus2-graphql-doc-generator graphql +``` + +4. Ensure a Directory for your GraphQL APi generated documentation resides in with the Docusaurus directory `/docs` structure + +```terminal +// Change into the "docs" workspace + +cd docs + +// you should have the "docs" directory and within that a "graphql-api" directory +mkdir docs/graphql-api // if needed +``` + +5. Update `docs/docusaurus.config.js` and configure the plugin and navbar + +``` +// docs/docusaurus.config.js +// ... + plugins: [ + [ + '@edno/docusaurus2-graphql-doc-generator', + { + schema: '../.redwood/schema.graphql', + rootPath: './docs', + baseURL: 'graphql-api', + linkRoot: '../..', + }, + ], + ], +// ... +themeConfig: + /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ + ({ + navbar: { + title: 'My Site', + logo: { + alt: 'My Site Logo', + src: 'img/logo.svg', + }, + items: [ + { + to: '/docs/graphql-api', // adjust the location depending on your baseURL (see configuration) + label: 'GraphQL API', // change the label with yours + position: 'right', + }, +//... +``` +6. Update `docs/sidebars.js` to include the generated `graphql-api/sidebar-schema.js` + +``` +// docs/sidebars.js +/** + * Creating a sidebar enables you to: + * - create an ordered group of docs + * - render a sidebar for each doc of that group + * - provide next/previous navigation + * + * The sidebars can be generated from the filesystem, or explicitly defined here. + * + * Create as many sidebars as you want. + */ + +// @ts-check + +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const sidebars = { + // By default, Docusaurus generates a sidebar from the docs folder structure + tutorialSidebar: [ + { + type: 'autogenerated', + dirName: '.', + }, + ], + ...require('./docs/graphql-api/sidebar-schema.js'), +} + +module.exports = sidebars +``` + +7. 
Generate the docs + +`yarn docusaurus graphql-to-doc` + +:::tip +You can overwrite the generated docs and bypass the plugin's diffMethod by passing `--force`: + +`yarn docusaurus graphql-to-doc --force` +::: + +8. Start Docusaurus + +``` +yarn start +``` + +##### Example Screens + +##### Schema Documentation +![graphql-doc-example-main](/img/graphql-api-docs/schema-doc.png) + +##### Type Example +![graphql-doc-example-type](/img/graphql-api-docs/contact-type.png) + +##### Query Example +![graphql-doc-example-query](/img/graphql-api-docs/contact-query.png) + +##### Mutation Example +![graphql-doc-example-mutation](/img/graphql-api-docs/schema-mutation.png) + +##### Directive Example +![graphql-doc-example-directive](/img/graphql-api-docs/schema-directive.png) + +##### Scalar Example +![graphql-doc-example-scalar](/img/graphql-api-docs/schema-scalar.png) + +## FAQ + +### Why Doesn't Redwood Use Something Like Nexus? + +This might be one of our most frequently asked questions of all time. Here's [Tom's response in the forum](https://community.redwoodjs.com/t/anyone-playing-around-with-nexus-js/360/5): + +> We started with Nexus, but ended up pulling it out because we felt like it was too much of an abstraction over the SDL. It’s so nice being able to just read the raw SDL to see what the GraphQL API is. + +<!-- TODO --> +<!-- This https://community.redwoodjs.com/t/how-to-add-resolvetype-resolver-for-interfaces/432/7 --> + +## Further Reading + +Eager to learn more about GraphQL? Check out some of the resources below: +- [GraphQL.wtf](https://graphql.wtf) covers most aspects of GraphQL and publishes one short video a week +- The official GraphQL Yoga (the GraphQL server powering Redwood) [tutorial](https://www.graphql-yoga.com/tutorial/basic/00-introduction) is the best place to get your hands on GraphQL basics +- And of course, [the official GraphQL docs](https://graphql.org/learn/) are a great place to do a deep dive into exactly how GraphQL works diff --git a/docs/versioned_docs/version-7.0/graphql/fragments.md b/docs/versioned_docs/version-7.0/graphql/fragments.md new file mode 100644 index 000000000000..71f693423fa4 --- /dev/null +++ b/docs/versioned_docs/version-7.0/graphql/fragments.md @@ -0,0 +1,310 @@ +# Fragments + +[GraphQL fragments](https://graphql.org/learn/queries/#fragments) are reusable units of GraphQL queries that allow developers to define a set of fields that can be included in multiple queries. Fragments help improve code organization, reduce duplication, and make GraphQL queries more maintainable. They are particularly useful when you want to request the same set of fields on different parts of your data model or when you want to share query structures across multiple components or pages in your application. + +## What are Fragments? + +Here are some key points about GraphQL fragments: + +1. **Reusability**: Fragments allow you to define a set of fields once and reuse them in multiple queries. This reduces redundancy and makes your code more DRY (Don't Repeat Yourself). + +2. **Readability**: Fragments make queries more readable by separating the query structure from the actual query usage. This can lead to cleaner and more maintainable code. + +3. **Maintainability**: When you need to make changes to the requested fields, you only need to update the fragment definition in one place, and all queries using that fragment will automatically reflect the changes.
+ +## Basic Usage + +Here's a basic example of how you might use GraphQL fragments in developer documentation: + +Let's say you have a GraphQL schema representing books, and you want to create a fragment for retrieving basic book information like title, author, and publication year. + + +```graphql +# Define a GraphQL fragment for book information +fragment BookInfo on Book { + id + title + author + publicationYear +} + +# Example query using the BookInfo fragment +query GetBookDetails($bookId: ID!) { + book(id: $bookId) { + ...BookInfo + description + # Include other fields specific to this query + } +} +``` + +In this example: + +- We've defined a fragment called `BookInfo` that specifies the fields we want for book information. +- In the `GetBookDetails` query, we use the `...BookInfo` spread syntax to include the fields defined in the fragment. +- We also include additional fields specific to this query, such as `description`. + +By using the `BookInfo` fragment, you can maintain a consistent set of fields for book information across different parts of your application without duplicating the field selection in every query. This improves code maintainability and reduces the chance of errors. + +In developer documentation, you can explain the purpose of the fragment, provide examples like the one above, and encourage developers to use fragments to organize and reuse their GraphQL queries effectively. + +## Using Fragments in RedwoodJS + +RedwoodJS makes it easy to use fragments, especially with VS Code and Apollo GraphQL Client. + +First, RedwoodJS instructs the VS Code GraphQL Plugin where to look for fragments by configuring the `documents` attribute of your project's `graphql.config.js`: + +```js +// graphql.config.js + +const { getPaths } = require('@redwoodjs/internal') + +module.exports = { + schema: getPaths().generated.schema, + documents: './web/src/**/!(*.d).{ts,tsx,js,jsx}', // 👈 Tells VS Code plugin where to find fragments +} +``` + +Second, RedwoodJS automatically creates the [fragmentRegistry](https://www.apollographql.com/docs/react/data/fragments/#registering-named-fragments-using-createfragmentregistry) needed for Apollo to know about the fragments in your project without needing to interpolate their declarations. + +Redwood exports ways to interact with fragments in the `@redwoodjs/web/apollo` package. + +``` +import { fragmentRegistry, registerFragment } from '@redwoodjs/web/apollo' +``` + +With `fragmentRegistry`, you can interact with the registry directly. + +With `registerFragment`, you can register a fragment with the registry and get back: + + ```ts + { fragment, typename, getCacheKey, useRegisteredFragment } + ``` + +which can then be used to work with the registered fragment. + +### Setup + +`yarn rw setup graphql fragments` + +See more in [cli commands - setup graphql fragments](../cli-commands.md#setup-graphql-fragments). + +### registerFragment + +To register a fragment, you can simply register it with `registerFragment`. + +```ts +import { registerFragment } from '@redwoodjs/web/apollo' + +registerFragment( + gql` + fragment BookInfo on Book { + id + title + author + publicationYear + } + ` +) +``` + +This makes the `BookInfo` available to use in your query: + + +```ts +import type { GetBookDetails } from 'types/graphql' + +import { useQuery } from '@redwoodjs/web' + +import BookInfo from 'src/components/BookInfo' + +const GET_BOOK_DETAILS = gql` + query GetBookDetails($bookId: ID!) 
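+  # BookInfo is registered globally via registerFragment, so it can be spread below without interpolating its definition into this document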
{ + book(id: $bookId) { + ...BookInfo + description + # Include other fields specific to this query + } + } +` + +// ... + +const { data, loading } = useQuery<GetBookDetails>(GET_BOOK_DETAILS) + +``` + + +You can then access the book info from `data` and render: + +```ts +{!loading && ( + <div key={`book-id-${data.book.id}`}> + <h3>Title: {data.book.title}</h3> + <p>by {data.book.author} ({data.book.publicationYear})</p> + </div> +)} +``` + +### fragment + +Access the original fragment you registered. + +```ts +import { fragment } from '@redwoodjs/web/apollo' +``` + +### typename + +Access the typename of the fragment you registered. + +```ts +import { typename } from '@redwoodjs/web/apollo' +``` + +For example, with + +```graphql +# Define a GraphQL fragment for book information +fragment BookInfo on Book { + id + title + author + publicationYear +} +``` + +the `typename` is `Book`. + +### getCacheKey + +A helper function that creates the cache key for the data associated with the fragment in the Apollo cache. + +```ts +import { getCacheKey } from '@redwoodjs/web/apollo' +``` + +For example, with + +```graphql +# Define a GraphQL fragment for book information +fragment BookInfo on Book { + id + title + author + publicationYear +} +``` + +`getCacheKey` is a function where `getCacheKey(42)` would return `Book:42`. + +### useRegisteredFragment + +```ts +import { registerFragment } from '@redwoodjs/web/apollo' + +const { useRegisteredFragment } = registerFragment( + // ... +) +``` + +A helper function that relies on Apollo's [`useFragment` hook](https://www.apollographql.com/docs/react/data/fragments/#usefragment) to read the fragment's data from the Apollo cache. + +The `useFragment` hook represents a lightweight live binding into the Apollo Client Cache. It enables Apollo Client to broadcast specific fragment results to individual components. This hook returns an always-up-to-date view of whatever data the cache currently contains for a given fragment. `useFragment` never triggers network requests of its own. + +This means that once the Apollo Client Cache has loaded the data needed for the fragment, you can simply render the fragment component with just its id reference. + +Also, anywhere the fragment component is rendered will be updated with the latest data whenever a `useQuery` that uses the fragment receives new data. + +```ts +import type { Book } from 'types/graphql' + +import { registerFragment } from '@redwoodjs/web/apollo' + +const { useRegisteredFragment } = registerFragment( + gql` + fragment BookInfo on Book { + id + title + author + publicationYear + } + ` +) + +const Book = ({ id }: { id: string }) => { + const { data, complete } = useRegisteredFragment<Book>(id) + + return ( + complete && ( + <div key={`book-id-${id}`}> + <h3>Title: {data.title}</h3> + <p>by {data.author} ({data.publicationYear})</p> + </div> + ) + ) +} + +export default Book +``` + +:::note +In order to use [fragments](#fragments) with [unions](#unions) and interfaces in Apollo Client, you need to tell the client how to discriminate between the different types that implement or belong to a supertype. + +Please see how to [generate possible types from fragments and union types](#generate-possible-types). +::: + + +## Possible Types for Unions + +In order to use [fragments](#fragments) with [unions](#unions) and interfaces in Apollo Client, you need to tell the client how to discriminate between the different types that implement or belong to a supertype. + +You pass a `possibleTypes` option to the `InMemoryCache` constructor to specify these relationships in your schema.
+ +This object maps the name of an interface or union type (the supertype) to the types that implement or belong to it (the subtypes). + +For example: + +```ts +/// web/src/App.tsx + +<RedwoodApolloProvider graphQLClientConfig={{ + cacheConfig: { + possibleTypes: { + Character: ["Jedi", "Droid"], + Test: ["PassingTest", "FailingTest", "SkippedTest"], + Snake: ["Viper", "Python"], + Groceries: ['Fruit', 'Vegetable'], + }, + }, +}}> +``` + +To make this easier to maintain, RedwoodJS GraphQL CodeGen automatically generates `possibleTypes` so you can simply assign it to the `graphQLClientConfig`: + + +```ts +// web/src/App.tsx + +import possibleTypes from 'src/graphql/possibleTypes' + +// ... + +const graphQLClientConfig = { + cacheConfig: { + ...possibleTypes, + }, +} + +<RedwoodApolloProvider graphQLClientConfig={graphQLClientConfig}> +``` + +To generate the `src/graphql/possibleTypes` file, enable fragments in `redwood.toml`: + +```toml title=redwood.toml +[graphql] + fragments = true +``` diff --git a/docs/versioned_docs/version-7.0/graphql/mocking-graphql-requests.md b/docs/versioned_docs/version-7.0/graphql/mocking-graphql-requests.md new file mode 100644 index 000000000000..9251d5ce6717 --- /dev/null +++ b/docs/versioned_docs/version-7.0/graphql/mocking-graphql-requests.md @@ -0,0 +1,161 @@ +--- +description: Mock GraphQL requests to test your components +--- + +# Mocking GraphQL Requests + +Testing and building components without having to rely on the API is a good best practice. Redwood makes this possible via `mockGraphQLQuery` and `mockGraphQLMutation`. + +The argument signatures of these functions are identical. Internally, they target different operation types based on their suffix. + +```jsx +mockGraphQLQuery('OperationName', (variables, { ctx, req }) => { + ctx.delay(1500) // pause for 1.5 seconds + return { + userProfile: { + id: 42, + name: 'peterp', + } + } +}) +``` + +## The operation name + +The first argument is the [operation name](https://graphql.org/learn/queries/#operation-name); it's used to associate mock-data with a query or a mutation: + +```jsx +query UserProfileQuery { /*...*/ } +mockGraphQLQuery('UserProfileQuery', { /*... */ }) +``` + +```jsx +mutation SetUserProfile { /*...*/ } +mockGraphQLMutation('SetUserProfile', { /*... */ }) +``` + +Operation names should be unique. + +## The mock-data + +The second argument can be an object or a function: + +```jsx {1} +mockGraphQLQuery('OperationName', (variables, { ctx }) => { + ctx.delay(1500) // pause for 1.5 seconds + return { + userProfile: { + id: 42, + name: 'peterp', + } + } +}) +``` + +If it's a function, it'll receive two arguments: `variables` and `{ ctx }`. The `ctx` object allows you to make adjustments to the response with the following functions: + +- `ctx.status(code: number, text?: string)`: set a http response code: + +```jsx {2} +mockGraphQLQuery('OperationName', (_variables, { ctx }) => { + ctx.status(404) +}) +``` + +<br/> + +- `ctx.delay(numOfMS)`: delay the response + +```jsx {2} +mockGraphQLQuery('OperationName', (_variables, { ctx }) => { + ctx.delay(1500) // pause for 1.5 seconds + return { id: 42 } +}) +``` + +<br/> + +- `ctx.errors(e: GraphQLError[])`: return an error object in the response: + +```jsx {2} +mockGraphQLQuery('OperationName', (_variables, { ctx }) => { + ctx.errors([{ message: 'Uh, oh!' 
}]) +}) +``` + +## TypeScript + +You can get stricter types by passing the query or mutation type and its variables type when mocking: + +```tsx +import type { UserProfileQuery, UserProfileQueryVariables } from 'types/graphql' + +mockGraphQLQuery<UserProfileQuery, UserProfileQueryVariables>('UserProfileQuery', { /*... */ }) +``` + +Or, you can manually pass your own types: + +```tsx +mockGraphQLQuery<{ + userProfile: { + id: number, + name: string, + } +}>('UserProfileQuery', { /*... */ }) +``` + +## Global mock-requests vs local mock-requests + +Placing your mock-requests in `"<name>.mock.js"` will cause them to be globally scoped in Storybook, making them available to all stories. + +> **All stories?** +> +> In React, it's often the case that a single component will have a deeply nested component that performs a GraphQL query or mutation. Having to mock those requests for every story can be painful and tedious. + +Using `mockGraphQLQuery` or `mockGraphQLMutation` inside a story is locally scoped and will overwrite a globally-scoped mock-request. + +We suggest always starting with globally-scoped mocks. + +## Mocking a Cell's `QUERY` + +To mock a Cell's `QUERY`, find the file ending with `.mock.js` in your Cell's directory. This file exports a value named `standard`, which is the mock-data that will be returned for your Cell's `QUERY`. + +```jsx {4,5,6,12,13,14} title="UserProfileCell/UserProfileCell.js" +export const QUERY = gql` + query UserProfileQuery { + userProfile { + id + } + } +` + +// UserProfileCell/UserProfileCell.mock.js +export const standard = { + userProfile: { + id: 42 + } +} +``` + +Since the value assigned to `standard` is the mock-data associated with the `QUERY`, modifying the `QUERY` means you also need to modify the mock-data. + +```diff title="UserProfileCell/UserProfileCell.js" +export const QUERY = gql` + query UserProfileQuery { + userProfile { + id ++ name + } + } +` + +// UserProfileCell/UserProfileCell.mock.js +export const standard = { + userProfile: { + id: 42, ++ name: 'peterp', + } +} +``` + +> **Behind the scenes** +> +> Redwood uses the value associated with `standard` as the second argument to `mockGraphQLQuery`. diff --git a/docs/versioned_docs/version-7.0/graphql/realtime.md b/docs/versioned_docs/version-7.0/graphql/realtime.md new file mode 100644 index 000000000000..d9d842c7fa2e --- /dev/null +++ b/docs/versioned_docs/version-7.0/graphql/realtime.md @@ -0,0 +1,9 @@ +# GraphQL Realtime + +In GraphQL, there are two options for real-time updates: **live queries** and **subscriptions**. + +Subscriptions are part of the GraphQL specification, whereas live queries are not. + +There are times when subscriptions are well suited to a realtime problem, and in some cases live queries may be a better fit. Later we’ll explore the pros and cons of each approach and how best to decide which to use and when. + +See [subscriptions and live queries](../realtime.md#subscriptions-and-live-queries) for setup and usage. diff --git a/docs/versioned_docs/version-7.0/graphql/trusted-documents.md b/docs/versioned_docs/version-7.0/graphql/trusted-documents.md new file mode 100644 index 000000000000..a8f1aaf98881 --- /dev/null +++ b/docs/versioned_docs/version-7.0/graphql/trusted-documents.md @@ -0,0 +1,152 @@ +# Trusted Documents + +RedwoodJS can be set up to enforce [persisted operations](https://the-guild.dev/graphql/yoga-server/docs/features/persisted-operations) – alternatively called [Trusted Documents](https://benjie.dev/graphql/trusted-documents).
+ +Use trusted documents if your GraphQL API is only for your own app (which is the case for most GraphQL APIs) for a massively decreased attack-surface, increased performance, and decreased bandwidth usage. + +At app build time, Redwood will extract the GraphQL documents (queries, etc) and make them available to the server. At run time, you can then send "document id" or "hash" instead of the whole document; only accept requests with a known document id. + +This prevents malicious attackers from executing arbitrary GraphQL thus helping with unwanted resolver traversal or information leaking. + +See [Configure Trusted Documents](#configure-trusted-documents) for more information and usage instructions. + +## Trusted Documents Explained + +When configured to use Trusted Documents, your project will: + +1. When generating types, generate files in `web/src/graphql` needed for persisted aka trusted documents, for example: + +```json title=web/src/graphql/persisted-documents.json +{ + "4dd4c49aef34e20af52efb50a1d0ebb0b8062b6d": "query FindAuthorQuery($id: Int!) { __typename author: user(id: $id) { __typename email fullName } }", + "46e9823d95110ebb2ef17ef82fff5c19a468f8a6": "query FindBlogPostQuery($id: Int!) { __typename blogPost: post(id: $id) { __typename author { __typename email fullName } body createdAt id title } }", + "421bcffdde84d448ec1a1b30b36eaeb966f00257": "query BlogPostsQuery { __typename blogPosts: posts { __typename author { __typename email fullName } body createdAt id title } }", + "f6ae606548009c2cd4c69b9aecebad0a730ba23d": "mutation DeleteContactMutation($id: Int!) { __typename deleteContact(id: $id) { __typename id } }", + "f7d2df28fcf87b0c29d225df79363d1c69159916": "query FindContactById($id: Int!) { __typename contact: contact(id: $id) { __typename createdAt email id message name } }", + "7af93a7e454d9c59bbb77c14e0c78e99207fd0c6": "query FindContacts { __typename contacts { __typename createdAt email id message name } }", + "e01ad8e899ac908458eac2d1f989b88160a0494b": "query EditContactById($id: Int!) { __typename contact: contact(id: $id) { __typename createdAt email id message name } }", + "94f51784b918a52e9af64f3c1fd4356903b611f8": "mutation UpdateContactMutation($id: Int!, $input: UpdateContactInput!) { __typename updateContact(id: $id, input: $input) { __typename createdAt email id message name } }", + "da35778949e1e8e27b7d1bb6b2a630749c5d7060": "mutation CreateContactMutation($input: CreateContactInput!) { __typename createContact(input: $input) { __typename id } }", + "4f880f909a16b7fe15898fe33a2ee26933466719": "query EditPostById($id: Int!) { __typename post: post(id: $id) { __typename authorId body createdAt id title } }", + "32b9225df81ff7845fedfa6d5c86c5d4a76073d2": "mutation UpdatePostMutation($id: Int!, $input: UpdatePostInput!) { __typename updatePost(id: $id, input: $input) { __typename authorId body createdAt id title } }", + "daf229dcea085f1beff91102a63c2ba9c88e8481": "mutation CreatePostMutation($input: CreatePostInput!) { __typename createPost(input: $input) { __typename id } }", + "e3405f6dcb6460943dd604423f0f517bc8318aaa": "mutation DeletePostMutation($id: Int!) { __typename deletePost(id: $id) { __typename id } }", + "43a94ad9a150aa7a7a665c73a931a5b18b6cc28b": "query FindPostById($id: Int!) 
{ __typename post: post(id: $id) { __typename authorId body createdAt id title } }", + "76308e971322b1ece4cdff75185bb61d7139e343": "query FindPosts { __typename posts { __typename authorId body createdAt id title } }", + "287beba179ef2c4448b4d3b150701993eddc07d6": "query BlogPostsQueryTrustedPage { __typename blogPosts: posts { __typename author { __typename email fullName } body createdAt id title } }" +} +``` + +2. They contain the query and hash that represents and identifies that query +3. Files with functions to lookup the generated trusted document such as: + +```ts title=web/src/graphql/gql.ts +// ... +export function graphql( + source: "\n query FindPosts {\n posts {\n id\n title\n body\n authorId\n createdAt\n }\n }\n" +): (typeof documents)["\n query FindPosts {\n posts {\n id\n title\n body\n authorId\n createdAt\n }\n }\n"]; +// ... +export function gql(source: string) { + return graphql(source); +} + +``` + +and the generated AST with the hash id in `web/src/graphql/graphql.ts` + +```ts title=web/src/graphql/graphql.ts +// ... +export const FindPostsDocument = {"__meta__":{"hash":"76308e971322b1ece4cdff75185bb61d7139e343"},"kind":"Document","definitions":[{"kind":"OperationDefinition","operation":"query","name":{"kind":"Name","value":"FindPosts"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"posts"},"selectionSet":{"kind":"SelectionSet","selections":[{"kind":"Field","name":{"kind":"Name","value":"__typename"}},{"kind":"Field","name":{"kind":"Name","value":"id"}},{"kind":"Field","name":{"kind":"Name","value":"title"}},{"kind":"Field","name":{"kind":"Name","value":"body"}},{"kind":"Field","name":{"kind":"Name","value":"authorId"}},{"kind":"Field","name":{"kind":"Name","value":"createdAt"}}]}}]}}]} as unknown as DocumentNode<FindPostsQuery, FindPostsQueryVariables>; +// ... +``` + +so that when a query or mutation is made, the web side GraphQL client doesn't send the query, but rather **just the hash id** so that the GraphQL Server can lookup the pre-generated query to run. + +```http +{"operationName":"FindPosts","variables":{},"extensions":{"persistedQuery":{"version":1,"sha256Hash":"76308e971322b1ece4cdff75185bb61d7139e343"}}} +``` + +It does so by adding a `api/src/lib/trustedDocumentsStore.ts` file for use on the GraphQL api side. + +```ts title=api/src/lib/trustedDocumentsStore.ts +export const store = { + // ... + '76308e971322b1ece4cdff75185bb61d7139e343': + 'query FindPosts { __typename posts { __typename authorId body createdAt id title } }', + // ... +} +``` + +:::note + +See how the `76308e971322b1ece4cdff75185bb61d7139e343` hash ids match? +::: + +Now, when the client requests to make a query for `76308e971322b1ece4cdff75185bb61d7139e343`, the GraphQL server knows to execute the corresponding query associated with that hash. + +This means that because queries are pre-generated and the hash ids ***must match**, there is no way for any un-trusted or ad-hock queries to get executed by the GraphQL server. + +Thus preventing unwanted queries or GraphQl traversal attacks, + +* Configure RedwoodJS to use Trusted Documents via `redwood.toml` +* Configure the GraphQL Server + +## Configure Trusted Documents + +Below are instructions to manually configure Trusted Documents in your RedwoodJS project. + +Alternatively, you can use the `yarn redwood setup graphql trusted-documents` [CLI setup command](../cli-commands.md#setup-graphql-trusted-docs). 
+ + +### Configure redwood.toml + +Setting `trustedDocuments` to true will + +* populate the `web/src/graphql` files with the pre-generated documents +* inform the Apollo GraphQL client to send the document hashes and not the query itself + +```toml title=redwood.toml +... +[graphql] + trustedDocuments = true +... +``` + +### Configure GraphQL Handler + +As part of GraphQL type and codegen, the `trustedDocumentsStore` is created in `api/src/lib`. + +This is the same information that is created in `web/src/graphql/persisted-documents.json` but wrapped in a `store` that can be easily imported and passed to the GraphQL Handler. + +To enable trusted documents, configure `trustedDocuments` with the store. + +```ts title=api/src/functions/graphql.ts +import { createGraphQLHandler } from '@redwoodjs/graphql-server' + +// ... +import { store } from 'src/lib/trustedDocumentsStore' + +export const handler = createGraphQLHandler({ + getCurrentUser, + loggerConfig: { logger, options: {} }, + directives, + sdls, + services, + trustedDocuments: { store }, + onException: () => { + // Disconnect from your database with an unhandled exception. + db.$disconnect() + }, +}) +``` + +If you'd like to customize the message when a query is not permitted, you can set the `persistedQueryOnly` configuration setting in `customErrors`: + +``` + trustedDocuments: { + store, + customErrors: { + persistedQueryOnly: 'This ad-hoc query is not allowed.' + }, + } +``` diff --git a/docs/versioned_docs/version-7.0/how-to/background-worker.md b/docs/versioned_docs/version-7.0/how-to/background-worker.md new file mode 100644 index 000000000000..d9379ffc6bf8 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/background-worker.md @@ -0,0 +1,95 @@ +--- +slug: creating-a-background-worker-with-exec-and-faktory +--- + +# Creating a Background Worker with Exec and Faktory + +In this how to, we'll use Redwood's [exec CLI command](cli-commands.md#exec) to create a background worker using [Faktory](https://contribsys.com/faktory/). + +At a high level, Faktory is a language-agnostic, persistent background-job server. +You can run it [with Docker](https://github.com/contribsys/faktory/wiki/Docker). + +We'll need a way of communicating with the Faktory server from our Redwood app. +We'll use this [node library](https://github.com/jbielick/faktory_worker_node) to send jobs from our Redwood app to our Faktory server. + +## Creating the Faktory Worker + +Let's create our Faktory worker. +First, generate the worker script: + +``` +yarn rw g script faktoryWorker +``` + +We'll start by registering a task called `postSignupTask` in our worker: + +```javascript title="scripts/faktoryWorker.js" +import { postSignupTask } from '$api/src/lib/tasks' +import { logger } from '$api/src/lib/logger' + +import faktory from 'faktory-worker' + +faktory.register('postSignupTask', async (taskArgs) => { + logger.info("running postSignupTask in background worker") + + await postSignupTask(taskArgs) +}) + +export default async ({ _args }) => { + const worker = await faktory + .work({ + url: process.env.FAKTORY_URL, + }) + .catch((error) => { + logger.error(`worker failed to start: ${error}`) + process.exit(1) + }) + + worker.on('fail', ({ _job, error }) => { + logger.error(`job failed: ${error}`) + }) +} +``` + +This won't work yet as we haven't made `postSignupTask` in `api/src/lib/tasks.js` or set `FAKTORY_URL`. +Set `FAKTORY_URL` in `.env` to where your server's running.
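+ +For example, if you're running the Faktory server locally with Docker on its default port, the variable might look something like this (the exact host, port, and any password depend on your setup): + +```bash title=.env +# Assumes a local Faktory server on its default port; adjust to match your deployment +FAKTORY_URL=tcp://localhost:7419 +```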
+ +In `postSignupTask`, we may want to perform operations that need to contact external services, such as sending an email. +For this type of work, we typically don't want to hold up the request/response cycle and can perform it in the background: + +```javascript title="api/src/lib/tasks.js" +export const postSignupTask = async ({ userId, emailPayload }) => { + // Send a welcome email to the new user. + // You'll have to have an integration with an email service for this to work. + await sendEmailWithTemplate({ + ...emailPayload, + TemplateModel: { + ...emailPayload.TemplateModel, + }, + }) +} +``` + +Once we've created our task, we need to call it in the right place. +For this task, it makes sense to call it right after the user has completed their signup. +This is an example of a Service that'll most likely be called via a GraphQL Mutation. + +```javascript title="src/services/auth/auth.js" +const faktory = require('faktory-worker') + +export const signUp = async ({ input }) => { + // Perform all the signup operations, such as creating an entry in the DB and auth provider + // ... + + // Then, send our task to the Faktory server + const client = await faktory.connect() + await client.job('postSignupTask', { ...taskArgs, }).push() + await client.close() +} + +``` + +That's it—we're done! +Run your Faktory server using Docker and run the worker using `yarn rw exec faktoryWorker`. + +If your Faktory server is running and you have set `FAKTORY_URL` correctly, you'll see the server pick up the job and your worker process it. diff --git a/docs/versioned_docs/version-7.0/how-to/build-dashboards-fast-with-tremor.md b/docs/versioned_docs/version-7.0/how-to/build-dashboards-fast-with-tremor.md new file mode 100644 index 000000000000..36a149b5de4b --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/build-dashboards-fast-with-tremor.md @@ -0,0 +1,413 @@ +--- + +description: "Learn how to build dashboards fast using the Tremor React library of data visualization components." +--- + +# Build Dashboards Fast with Tremor + +[Tremor](https://www.tremor.so) is a React library to build dashboards fast. Its modular components are fully open-source, made by data scientists and software engineers with a sweet spot for design. + +In this how to, you'll learn how to + +* set up tremor in a new or existing RedwoodJS app +* use tremor components to lay out a new dashboard +* use a chart and card component to visualize static data +* access a GitHub repo to make your dashboard dynamic using an [example RedwoodJS app](https://github.com/redwoodjs/redwoodjs-tremor-dashboard-demo) + +## Live Demo + +See what's possible with a [dynamic dashboard live demo](https://tremor-redwood-dashboard-demo.netlify.app) built with RedwoodJS and Tremor. + +Cool, right? + +Let's get started! + +## Create a New RedwoodJS Project + + +In our terminal, we create a new RedwoodJS project: + +```bash +yarn create redwood-app my-project --ts +``` + +> **Note:** If you already have a RedwoodJS project, you can skip this step and continue with the next section. + +If you do not want a TypeScript project, omit the `--ts` flag. + +> **Important:** RedwoodJS prefers yarn over npm because a project is a monorepo with api and web workspaces. You will install tremor and other web packages using yarn workspaces. + + +Use the Redwood setup command to install `TailwindCSS`, its peer dependencies, and create the `tailwind.config.js` file.
+ + +```bash +yarn rw setup ui tailwindcss +``` + +Install `tremor` in the web workspace from your command line via yarn. + +```bash +yarn workspace web add @tremor/react +``` + +Install `heroicons version 1.0.6` from your command line via yarn. + +```bash +yarn workspace web add @heroicons/react@1.0.6 +``` + +Update tailwind config `web/config/tailwind.config.js` **including the path to the tremor** module. + +```js +/** @type {import('tailwindcss').Config} */ +module.exports = { + content: [ + 'src/**/*.{js,jsx,ts,tsx}', + '../node_modules/@tremor/**/*.{js,ts,jsx,tsx}', + ], + theme: { + extend: {}, + }, + plugins: [], +} +``` + +> **Note:** the path for node_modules is `../` because the web workspace is in a subdirectory of the root directory. + +## Add a Dashboard Page + +Generate a page from your command line. + +```bash +yarn rw g page dashboard / +``` + +You will now have a new page at `web/src/pages/DashboardPage/DashboardPage.tsx` and `web/src/Routes.tsx` will have a new route added at: + +```tsx filename="web/src/Routes.tsx" +// web/src/Routes.tsx` + +<Route path="/" page={DashboardPage} name="dashboard" /> +``` + + +Add simple area chart to the `DashboardPage`: + +```jsx +import { Grid, Col, Card, Title, AreaChart } from '@tremor/react' + +import { MetaTags } from '@redwoodjs/web' + +const DashboardPage = () => { + const chartdata = [ + { + date: 'Jan 22', + SemiAnalysis: 2890, + 'The Pragmatic Engineer': 2338, + }, + { + date: 'Feb 22', + SemiAnalysis: 2756, + 'The Pragmatic Engineer': 2103, + }, + { + date: 'Mar 22', + SemiAnalysis: 3322, + 'The Pragmatic Engineer': 2194, + }, + { + date: 'Apr 22', + SemiAnalysis: 3470, + 'The Pragmatic Engineer': 2108, + }, + { + date: 'May 22', + SemiAnalysis: 3475, + 'The Pragmatic Engineer': 1812, + }, + { + date: 'Jun 22', + SemiAnalysis: 3129, + 'The Pragmatic Engineer': 1726, + }, + ] + + const dataFormatter = (number: number) => { + return '$ ' + Intl.NumberFormat('us').format(number).toString() + } + + return ( + <div className="m-12"> + <MetaTags title="Dashboard" description="Dashboard page" /> + + <h1 className="text-2xl mb-12">Dashboard</h1> + + <Grid numCols={1} numColsSm={2} numColsLg={3} className="my-8 gap-6"> + <Col numColSpan={1} numColSpanLg={3}> + <Card> + <Title>Newsletter revenue over time (USD) + + + + +
+ ) +} + +export default DashboardPage +``` + +Start your RedwoodJS development server + +```bash +yarn rw dev +``` + +Your app will start up and you should see the Dashboard page with an area with two `Newsletter revenue over time (USD)` data series. + +## Add a new component for a KPI Card + +Generate a component for a KPI (Key Performance Indicator) from your command line. + +```bash +yarn rw g component KpiCard +``` + +You will now have a new React component at `/web/src/components/KpiCard/KpiCard.tsx`. + +Update the `KpiCard` component to import the `Card` component and assemble a card using its default +styling. + +To create our first KPI, we import the `Metric` and `Text` component and place them within the card component. We use [Tailwind CSS'](https://tailwindcss.com/docs/utility-first) utilities in the **className** property to reduce the card's width and to center it horizontally. + +To make our KPI card more insightful, we add a `ProgressBar`, providing +contextual details about our metric. To align both text elements, we also import +the `Flex` component. + +```tsx filename="/web/src/components/KpiCard/KpiCard.tsx" +// /web/src/components/KpiCard/KpiCard.tsx + +import { + BadgeDelta, + DeltaType, + Card, + Flex, + Metric, + ProgressBar, + Text, +} from '@tremor/react' + +export type Kpi = { + title: string + metric: string + progress: number + metricTarget: string + delta: string + deltaType: DeltaType +} + +interface Props { + kpi: Kpi +} + +const KpiCard = ({ kpi }: Props) => { + return ( + + +
+ {kpi.title} + {kpi.metric} +
+ {kpi.delta} +
+ + {`${kpi.progress}% (${kpi.metric})`} + {kpi.metricTarget} + + +
+ ) +} + +export default KpiCard +``` + +## Add the KPI Card component to your Dashboard + +Import the `KpiCard` component and `Kpi` type. + +```tsx +import KpiCard from 'src/components/KpiCard/KpiCard' // 👈 Import the KpiCard component +import type { Kpi } from 'src/components/KpiCard/KpiCard' // 👈 Import the Kpi type +``` + +Next, create the `kpi` data collection with sample data + +```tsx + const kpis: Kpi[] = [ // 👈 Create some sample KPI data + { + title: 'Sales', + metric: '$ 12,699', + progress: 15.9, + metricTarget: '$ 80,000', + delta: '13.2%', + deltaType: 'moderateIncrease', + }, + { + title: 'Profit', + metric: '$ 45,564', + progress: 36.5, + metricTarget: '$ 125,000', + delta: '23.9%', + deltaType: 'increase', + }, + { + title: 'Customers', + metric: '1,072', + progress: 53.6, + metricTarget: '2,000', + delta: '10.1%', + deltaType: 'moderateDecrease', + }, + ] +``` + +Then iterate over the collection to add a `KpiCard` inside new `Col` for each KPI data item: + +```tsx + {kpis.map((kpi, i) => ( + + + + ))} +``` + +Your Dashboard page should now look like: + +```tsx +import { Grid, Col, Card, Title, AreaChart } from '@tremor/react' + +import { MetaTags } from '@redwoodjs/web' + +import KpiCard from 'src/components/KpiCard/KpiCard' // 👈 Import the KpiCard component +import type { Kpi } from 'src/components/KpiCard/KpiCard' // 👈 Import the Kpi type + +const DashboardPage = () => { + const chartdata = [ + { + date: 'Jan 22', + SemiAnalysis: 2890, + 'The Pragmatic Engineer': 2338, + }, + { + date: 'Feb 22', + SemiAnalysis: 2756, + 'The Pragmatic Engineer': 2103, + }, + { + date: 'Mar 22', + SemiAnalysis: 3322, + 'The Pragmatic Engineer': 2194, + }, + { + date: 'Apr 22', + SemiAnalysis: 3470, + 'The Pragmatic Engineer': 2108, + }, + { + date: 'May 22', + SemiAnalysis: 3475, + 'The Pragmatic Engineer': 1812, + }, + { + date: 'Jun 22', + SemiAnalysis: 3129, + 'The Pragmatic Engineer': 1726, + }, + ] + + const kpis: Kpi[] = [ // 👈 Create some sample KPI data + { + title: 'Sales', + metric: '$ 12,699', + progress: 15.9, + metricTarget: '$ 80,000', + delta: '13.2%', + deltaType: 'moderateIncrease', + }, + { + title: 'Profit', + metric: '$ 45,564', + progress: 36.5, + metricTarget: '$ 125,000', + delta: '23.9%', + deltaType: 'increase', + }, + { + title: 'Customers', + metric: '1,072', + progress: 53.6, + metricTarget: '2,000', + delta: '10.1%', + deltaType: 'moderateDecrease', + }, + ] + + const dataFormatter = (number: number) => { + return '$ ' + Intl.NumberFormat('us').format(number).toString() + } + + return ( +
+ + +

Dashboard

+ + + {kpis.map((kpi, i) => ( + + + + ))} + + + Newsletter revenue over time (USD) + + + + +
+ ) +} + +export default DashboardPage +``` + +Congratulations! You made your first dashboard. + +## Next Steps + +Now that you have a Dashboard + +1. Explore the other [components](https://www.tremor.so/components) and [blocks](https://www.tremor.so/blocks) that you can use to showcase your data + +2. Learn how to make a [dynamic dashboard using RedwoodJS cells](https://github.com/redwoodjs/redwoodjs-tremor-dashboard-demo) to fetch data from a Prisma-backed database using GraphQL. + +3. See a [dynamic dashboard live demo](https://tremor-redwood-dashboard-demo.netlify.app)! + + diff --git a/docs/versioned_docs/version-7.0/how-to/custom-function.md b/docs/versioned_docs/version-7.0/how-to/custom-function.md new file mode 100644 index 000000000000..a12d41176294 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/custom-function.md @@ -0,0 +1,211 @@ +# Custom Function + +You may not have noticed, but when you're making GraphQL calls, you're actually calling a [Function](https://docs.netlify.com/functions/overview/) (not to be confused with a Javascript `function`) on the API side. Capital-F Functions are meant to be deployed to serverless providers like AWS Lambda. (We're using Netlify's nomenclature when we call them Functions.) + + + + + +Did you know you can create your own Functions that do whatever you want? Normally we recommend that if you have custom behavior, even if it's unrelated to the database, you make it available as a GraphQL field so that your entire application has one, unified API interface. But rules were meant to be broken! + +How about a custom Function that returns the timestamp from the server? + +## Creating a Function + +Step one is to actually create the custom Function. Naturally, we have a generator for that. Let's call our custom Function "serverTime": + +```bash +yarn rw generate function serverTime +``` + +That creates a stub you can test out right away. Make sure your dev server is running (`yarn rw dev`), then point your browser to `http://localhost:8910/.redwood/functions/serverTime`. + +![serverTime Function output](https://user-images.githubusercontent.com/32992335/107839683-609c2300-6d62-11eb-93d7-ff9c1bfb0ff2.png) + +### Interlude: `apiUrl` + +The `.redwood/functions` bit in the link you pointed your browser to is what's called the `apiUrl`. You can configure it in your `redwood.toml`: + +```toml {5} +# redwood.toml + +[web] + port = 8910 + apiUrl = "/.redwood/functions" +``` + +After you setup a deploy (via `yarn rw setup deploy `), it'll change to something more appropriate, like `.netlify/functions` in Netlify's case. + + + + +Why do we need `apiUrl`? Well, when you go to deploy, your serverless functions won't be in the same place as your app; they'll be somewhere else. Sending requests to the `apiUrl` let's your provider handle the hard work of figuring out where they actually are, and making sure that your app can actually access them. + +If you were to try and fetch `http://localhost:8911/serverTime` from the web side, you'd run into an error you'll get to know quite well: CORS. + +#### Interludeception: CORS + +Time for an interlude within an interlude, because that's how you'll always feel when it comes to CORS: you were doing something else, and then `No 'Access-Control-Allow-Origin' header is present on the requested resource`. Now you're doing CORS. + +If you don't know much about CORS, it's something you probably should know some about at some point. 
CORS stands for Cross Origin Resource Sharing; in a nutshell, by default, browsers aren't allowed to access resources outside their own domain. So, requests from `localhost:8910` can only access resources at `localhost:8910`. Since all your serverless functions are at `localhost:8911`, doing something like + +```javascript +// the `http://` is important! +const serverTime = await fetch('http://localhost:8911/serverTime') +``` + +from the web side would give you an error like: + +``` +Access to fetch at 'http://localhost:8911/serverTime' from origin 'http://localhost:8910' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource. If an opaque response serves your needs, set the request's mode to 'no-cors' to fetch the resource with CORS disabled. +``` + +We could set the headers for `serverTime` to allow requests from any origin... but maybe a better idea would be to never request `8911` from `8910` in the first place. Hence the `apiUrl`! We're making a request to `8910/.redwood/functions/serverTime`—still the same domain—but [Vite](https://github.com/redwoodjs/redwood/blob/main/packages/vite/src/index.ts#L119) proxies them to `localhost:8911/serverTime` for us. + +## Getting the Time + +Ok—back to our custom Function. Let's get the current time and return it in the body of our handler: + +```javascript {4} title="api/src/functions/serverTime.js" +export const handler = async (event, context) => { + return { + statusCode: 200, + body: new Date() + } +} +``` + +![Time output screenshot](https://user-images.githubusercontent.com/300/81352089-87faec80-907a-11ea-96f7-bb05345a86d7.png) + +> Here we're using a [Chrome extension](https://chrome.google.com/webstore/detail/json-viewer/gbmdgpbipfallnflgajpaliibnhdgobh) that prettifies data that could be identified as JSON. In this case, the date is wrapped in quotes, which is valid JSON, so the extension kicks in. + +How about we make sure the response is a JSON object: + +```javascript {4-5} title="api/src/functions/serverTime.js" +export const handler = async (event, context) => { + return { + statusCode: 200, + headers: { 'Content-Type': 'application/json ' }, + body: JSON.stringify({ time: new Date() }), + } +} +``` + +![JSON time output screenshot](https://user-images.githubusercontent.com/300/81352131-9fd27080-907a-11ea-8db0-6308a4c48b5f.png) + +> Note that Node.js doesn't have ES module support (yet), but we use Babel to transpile during the build phase so you can still use `import` syntax for external packages in your Functions. + +### Bonus: Filtering by Request Method + +Since you are most definitely an elite hacker, you probably noticed that our new endpoint is available via all HTTP methods: **GET**, **POST**, **PATCH**, etc. In the spirit of [REST](https://www.codecademy.com/articles/what-is-rest), this endpoint should really only be accessible via a **GET**. + +> Again, because you're an elite hacker you definitely said "excuse me, actually this endpoint should respond to **HEAD** and **OPTIONS** methods as well." Okay fine, but this is meant to be a quick introduction, cut us some slack! Why don't you write a recipe for us and open a PR, smartypants?? 
+ +Inspecting the `event` argument being sent to `handler` gets us all kinds of juicy details on this request: + +```javascript {2} title="api/src/functions/serverTime.js" +export const handler = async (event, context) => { + console.log(event) + return { + statusCode: 200, + headers: { 'Content-Type': 'application/json ' }, + body: JSON.stringify({ time: new Date() }), + } +} +``` + +Take a look in the terminal window where you're running `yarn rw dev` to see the output: + +```json +{ + "httpMethod": "GET", + "headers": { + "host": "localhost:8911", + "connection": "keep-alive", + "cache-control": "max-age=0", + "dnt": "1", + "upgrade-insecure-requests": "1", + "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.129 Safari/537.36", + "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng;q=0.8,application/signed-exchange;v=b3;q=0.9", + "sec-fetch-site": "none", + "sec-fetch-mode": "navigate", + "sec-fetch-user": "?1", + "sec-fetch-dest": "document", + "accept-encoding": "gzip, deflate, br", + "accept-language": "en-US,en;q=0.9" + }, + "path": "/serverTime", + "queryStringParameters": {}, + "body": "", + "isBase64Encoded": false +} +``` + +That first entry, `httpMethod`, is what we want. Let's check the method and return a 404 if it isn't a **GET**: + +```javascript {2-4} title="api/src/functions/serverTime.js" +export const handler = async (event, context) => { + if (event.httpMethod !== 'GET') { + return { statusCode: 404 } + } + + return { + statusCode: 200, + headers: { 'Content-Type': 'application/json ' }, + body: JSON.stringify({ time: new Date() }), + } +} +``` + +It's tough to test other HTTP methods in the browser without installing an extension, but we can do it from the command line with curl: + +```bash +$ curl -XPOST http://localhost:8911/serverTime -I +``` + +You should see: + +```bash +HTTP/1.1 404 Not Found +X-Powered-By: Express +Date: Thu, 07 May 2020 22:33:55 GMT +Connection: keep-alive +Content-Length: 0 +``` + +And just to be sure, let's make that same request with a **GET** (curl's default method): + +```bash +$ curl http://localhost:8911/serverTime +{"time":"2020-05-07T22:36:12.973Z"} +``` + +> If you leave the `-I` flag on then curl will default to a HEAD request! Okay fine, you were right elite hacker! + +### Super Bonus: Callback Hell + +Redwood uses the async/await version of Function handlers, but you can also use the callback version. In that case your Function would look something like: + +```javascript {1,3,6,10} title="api/src/functions/serverTime.js" +export const handler = (event, context, callback) => { + if (event.httpMethod !== 'GET') { + callback(null, { statusCode: 404 }) + } + + callback(null, { + statusCode: 200, + headers: { 'Content-Type': 'application/json ' }, + body: JSON.stringify({ time: new Date() }), + }) +} +``` + +Yeah, kinda gross. What's with that `null` as the first parameter? That's used if your handler needs to return an error. More on callback-based handlers can be found in [Netlify's docs](https://docs.netlify.com/functions/build-with-javascript/#format). + +The callback syntax may not be _too_ bad for this simple example. But, if you find yourself dealing with Promises inside your handler, and you choose to go use callback syntax, you may want to lie down and rethink the life choices that brought you to this moment. 
If you still want to use callbacks you had better hope that time travel is invented by the time this code goes into production, so you can go back in time and prevent yourself from ruining your own life. You will, of course, fail because you already chose to use callbacks the first time so you must have been unsuccessful in stopping yourself when you went back. + +Trust us, it's probably best to just stick with async/await instead of tampering with spacetime. + +### Conclusion + +We hope this gave you enough info to get started with custom Functions, and that you learned a little something about the futility of trying to change the past. Now go out and build something awesome! diff --git a/docs/versioned_docs/version-7.0/how-to/dbauth-passwordless.md b/docs/versioned_docs/version-7.0/how-to/dbauth-passwordless.md new file mode 100644 index 000000000000..e07e0bcd4c05 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/dbauth-passwordless.md @@ -0,0 +1,640 @@ +# Setting up dbAuth to be passwordless + +Security is really important. Sometimes you don't want to integrate with a third-party authentication service. Whatever the reason, Redwood has you covered: dbAuth is a great option for authenticating users. + +One thing to consider, though, is that you're now collecting the user's login and password. If you'd rather not collect a password, an alternative is to generate a token in its place. The only data needed for passwordless is the user's email address. + +In this how-to I'll show you how to set up dbAuth to be passwordless. You'll still need to set up a way to [send emails](../how-to/sending-emails.md), but there are plenty of ways to do that. + +## Background + +Let me start by sharing a little bit about how passwordless works. + +### What is a passwordless authentication method? + +A passwordless authentication method is one where the user is not required to enter a password. Instead, the user is sent a link to their email address. When they click the link, they are logged in. + +Passwordless uses a token that is time-sensitive. So instead of storing a password, we store a token and an expiration. + +That token is generated randomly and is stored in the database. + +## How to do it + +### 1. Modify the Prisma schema + +First, we need to modify the Prisma schema. + +If you followed the tutorial you'll have a `User` model. Here's what it looks like after the changes. + +```jsx {4-6} +model User { + id Int @id @default(autoincrement()) + name String? + email String @unique + loginToken String + loginTokenExpiresAt DateTime? + salt String? +} +``` + +Make note of the optional `salt` field. + +Once you've made the changes, you'll need to migrate your database. + +```bash +yarn rw prisma migrate dev +``` + +### 2. Setting up the generateLoginToken function + +Next, we need to create a function that will generate a token and an expiration date. + +If you followed the tutorial, you might not have a `/api/src/services/users/users.js` file. If that's the case, you can create it with the following command using your terminal. + +```bash +yarn rw g service users +``` + +Now that you have the file, let's add the `generateLoginToken` function.
+ +```javascript {21} title="/api/src/services/users/users.js" +// add this import to the top of the file +import CryptoJS from 'crypto-js' +// add this to the bottom of the file +export const generateLoginToken = async ({ email }) => { + try { + // look up if the user exists + let lookupUser = await db.user.findFirst({ where: { email } }) + if (!lookupUser) return { message: 'Login Request received' } + // here we're going to generate a random password of 6 numbers + let randomNumber = (() => { + let random = CryptoJS.lib.WordArray.random(6) + let randomString = random.toString() + let sixDigitNumber = randomString.replace(/\D/g, '') + if (sixDigitNumber.length < 6) { + sixDigitNumber = sixDigitNumber.padStart(6, '0') + } + if (sixDigitNumber.length > 6) { + sixDigitNumber = sixDigitNumber.slice(0, 6) + } + return sixDigitNumber.toString() + })() + console.log({ randomNumber }) // email the user this number + let salt = CryptoJS.lib.WordArray.random(30) + let loginToken = CryptoJS.PBKDF2(randomNumber, salt, { + keySize: 256 / 32, + }).toString() + // now we'll update the user with the new salt and loginToken + let loginTokenExpiresAt = new Date() + loginTokenExpiresAt.setMinutes(loginTokenExpiresAt.getMinutes() + 15) + let data = { + salt, + loginToken, + loginTokenExpiresAt, + } + await db.user.update({ + where: { id: lookupUser.id }, + data + }) + + return { message: 'Login Request received' } + } catch (error) { + console.log({ error }) + throw new UserInputError(error.message) + } +} + +``` +### 3. Add generateToken to the SDL and secure loginToken + +In addition to the new function, we need to add it to the sdl file. While we're here let's also ensure we do not expose the loginToken. This file may be users.sdl.js or users.sdl.ts depending on if you set up Redwood to use JavaScript or TypeScript. + +```javascript {21} title="/api/src/graphql/users.sdl.js" +export const schema = gql` + type User { + id: Int! + name: String + email: String! + } + input CreateUserInput { + name: String + email: String! + } + input UpdateUserInput { + name: String + email: String! + } + type userTokenResponse { + message: String! + } + type Mutation { + createUser(input: CreateUserInput!): User! @requireAuth + updateUser(id: Int!, input: UpdateUserInput!): User! @requireAuth + deleteUser(id: Int!): User! @requireAuth + generateToken(email: String!): userTokenResponse! @skipAuth + } +``` + +### 4. Modify the auth function + +We need to consider how we want to limit the authentication. I've added a expiration date to the token, so we'll need to check that. + +```js title="/api/src/functions/auth.js" +// ... other functions +const loginOptions = { + handler: async (user) =>{ + let loginExpiresAt = new Date(user?.loginTokenExpiresAt) + let now = new Date() + if (loginExpiresAt < now) throw 'Login token expired' + // if the user logged in with a token we need to break + // the token. 
We'll do this by clearing the salt and + // expiration + // this will make the token a one-time use + db.user.update({ + where: { id: user.id }, + data: { + loginTokenExpiresAt: null, + salt: null, + } + }) + return user + }, + errors: { + // here I modified the following, feel free to modify the other messages + incorrectPassword: 'Incorrect token', + } +} +// we also need to update the signupOptions +const signupOptions = { + handler: ({ username, hashedPassword, userAttributes }) => { + return db.user.create({ + data: { + email: username, + loginToken: hashedPassword, + salt: null, + name: userAttributes.name, + } + }) + } + // ... othter stuff +} +// and last we need to update the authFields +const authHandler = new DbAuthHandler(event, context, { + db: db, + authModelAccessor: 'user', + authFields: { + id: 'id', + hashedPassword: 'loginToken', + salt: 'salt', + }, + // ... other stuff +}) +``` + +As of right now, nothing works, lets fix that. + +### 5. Making the login form + +We need to make a form that will allow the user to enter their email address. + +Let's start with the generator. + +```bash +yarn rw g component LoginPasswordlessForm +``` + +This created a component in `web/src/components/LoginPasswordlessForm/LoginPasswordlessForm.js`. Let's update it. + +```jsx title="/web/src/components/LoginPasswordlessForm/LoginPasswordlessForm.js" +import { + Form, + Label, + TextField, + PasswordField, + Submit, + FieldError, +} from '@redwoodjs/forms' +import { navigate, routes, Link } from '@redwoodjs/router' +import { MetaTags, useMutation } from '@redwoodjs/web' +import { Toaster, toast } from '@redwoodjs/web/toast' +const GENERATE_LOGIN_TOKEN = gql` + mutation generateLoginToken($email: String!) { + generateLoginToken(email: $email) { + message + } + } +` + +const LoginPasswordlessForm = ({ setWaitingForCode, setEmail }) => { + const [generateLoginToken] = useMutation( + GENERATE_LOGIN_TOKEN, + { + onCompleted: () => { + toast.success('Check your email for a login link') + setWaitingForCode(true) + }, + } + ) + const onSubmit = async (data) => { + setEmail(data.email) + const response = await generateLoginToken({ + variables: { email: data.email }, + fetchPolicy: 'no-cache', + }) + if (response.error) { + toast.error(response.error) + } + } + + return ( + <> + +
+ +
+
+
+

Login

+
+ +
+
+
+ + + + +
+ + Send Token + +
+ +
+
+
+
+ Don't have an account?{' '} + + Sign up! + +
+
+
+ + ) +} + +export default LoginPasswordlessForm +``` + +We aren't rendering it anywhere yet, but when we do it will look like this. + +![image](https://user-images.githubusercontent.com/638764/220204773-6c6aaf86-680f-4e2c-877c-3876070254d3.png) + +### 6. Making the login with token form + +Now we also need a form that will accept the code that was sent to the user. + +```bash +yarn rw g component LoginPasswordlessTokenForm +``` + +```jsx title="/web/src/components/LoginPasswordlessTokenForm/LoginPasswordlessTokenForm.js" +import { useEffect, useRef } from 'react' + +import { + Form, + Label, + TextField, + PasswordField, + Submit, + FieldError, +} from '@redwoodjs/forms' +import { navigate, routes, Link } from '@redwoodjs/router' +import { MetaTags, useMutation } from '@redwoodjs/web' +import { Toaster, toast } from '@redwoodjs/web/toast' + +import { useAuth } from 'src/auth' + +const LoginPasswordlessTokenForm = ({ setWaitingForCode, email, code }) => { + const { isAuthenticated, logIn } = useAuth() + useEffect(() => { + if (isAuthenticated) { + navigate(routes.home()) + } + if (email && code) { + console.log('email', email) + logIn({ username: email, password: code }) + } + }, [isAuthenticated, email, code, logIn]) + const onSubmit = async (data) => { + // login expects a username and password for dbauth + // so we are passing them. + const response = await logIn({ username: email, password: data.loginToken }) + if (response.error) { + toast.error(response.error) + } + } + + return ( + <> + +
+ +
+
+
+

+ Login with Token +

+
+ +
+
+
+ + + + + + + + +
+ Login +
+
+ +
+ +
+
+
+
+ Don't have an account?{' '} + + Sign up! + +
+
+
+    
+  )
+}
+
+export default LoginPasswordlessTokenForm
+```
+
+This will be the form loaded after the email is entered. Again, we aren't rendering it anywhere, but we will in the next step.
+
+Here's a preview of the form.
+
+![image](https://user-images.githubusercontent.com/638764/220212316-bcc5cde6-53cf-4a65-ab54-0e2763da924a.png)
+
+### 7. Making the new login page
+Now each of those forms is controlled by the props we pass to them. We will make a new page that will control the state of the forms.
+
+```bash
+yarn rw g page LoginPasswordless
+```
+
+```jsx title="/web/src/pages/LoginPasswordlessPage/LoginPasswordlessPage.js"
+import { useEffect, useState } from 'react'
+
+import { useLocation } from '@redwoodjs/router'
+import { MetaTags } from '@redwoodjs/web'
+
+import LoginPasswordlessForm from 'src/components/LoginPasswordlessForm/LoginPasswordlessForm'
+import LoginPasswordlessTokenForm from 'src/components/LoginPasswordlessTokenForm/LoginPasswordlessTokenForm'
+
+const LoginPasswordlessPage = () => {
+  let [waitingForCode, setWaitingForCode] = useState(false)
+  let [email, setEmail] = useState()
+  let [code, setCode] = useState()
+  // onload set email from query string
+  let { search } = useLocation()
+  useEffect(() => {
+    let params = new URLSearchParams(search)
+    let magic = params.get('magic')
+    // if the magic param exists, decode it and set email and waitingForCode
+    if (magic) {
+      // decoded is email:code
+      let decoded = window.atob(magic)
+      let [email, code] = decoded.split(':')
+      setEmail(email)
+      setCode(code)
+      setWaitingForCode(true)
+    }
+  }, [search])
+
+  return (
+    <>
+      
+      
+      {!waitingForCode && (
+        
+      )}
+      {waitingForCode && (
+        
+      )}
+    
+  )
+}
+
+export default LoginPasswordlessPage
+```
+### 8. Updating the signup page
+We need to update the signup page to just take the email.
+
+```jsx title="/web/src/pages/SignupPage/SignupPage.js"
+import { useRef } from 'react'
+import { useEffect } from 'react'
+
+import {
+  Form,
+  Label,
+  TextField,
+  PasswordField,
+  FieldError,
+  Submit,
+} from '@redwoodjs/forms'
+import { Link, navigate, routes } from '@redwoodjs/router'
+import { MetaTags } from '@redwoodjs/web'
+import { toast, Toaster } from '@redwoodjs/web/toast'
+
+import { useAuth } from 'src/auth'
+
+const SignupPage = () => {
+  const { isAuthenticated, signUp } = useAuth()
+  // generate a random throwaway password of the given length (default 32)
+  const randomString = (length = 32) => {
+    const characterSet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+    const array = new Uint8Array(length);
+    window.crypto.getRandomValues(array);
+    const returnString = Array.from(array)
+      .map((value) => characterSet[value % characterSet.length])
+      .join('');
+    return returnString;
+  }
+  useEffect(() => {
+    if (isAuthenticated) {
+      navigate(routes.home())
+    }
+  }, [isAuthenticated])
+
+  // focus on username box on page load
+  const emailRef = useRef(null)
+  useEffect(() => {
+    emailRef.current?.focus()
+  }, [])
+
+  const onSubmit = async (data) => {
+    const response = await signUp({
+      username: data.email,
+      password: randomString(), // this is a random string and is not important
+    })
+
+    if (response.message) {
+      toast(response.message)
+    } else if (response.error) {
+      toast.error(response.error)
+    } else {
+      // user is signed in automatically
+      toast.success('Welcome!')
+    }
+  }
+
+  return (
+    <>
+      
+      
+      
+ +
+
+
+

Signup

+
+ +
+
+
+ + + + +
+ + Sign Up + +
+ +
+
+
+
+ Already have an account?{' '} + + Log in! + +
+
+
+ + ) +} + +export default SignupPage +``` + +You should see the changes and it should look like this! + +![image](https://user-images.githubusercontent.com/638764/220204883-800829ab-e037-41e1-a2da-d47923c4d20c.png) + + +### 9. Updating the routes +The last thing we need to to do is update the routes to use the new page. + +```jsx title="/web/src/Routes.js" +const Routes = () => { + // other stuff + return ( + + + + {/*other routes*/} + + ) +} +``` +## You did it! + +Now that you did you can rest easy. Your authentication relies on just your database but also, if some bad actor got access to it the only user data you have is really the email address. diff --git a/docs/versioned_docs/version-7.0/how-to/disable-api-database.md b/docs/versioned_docs/version-7.0/how-to/disable-api-database.md new file mode 100644 index 000000000000..39e380dbb914 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/disable-api-database.md @@ -0,0 +1,416 @@ +# Disable API/Database + +Did you know you could deploy your Redwood app without an API layer or database? Maybe you have a simple static site that doesn't need any external data, or you only need to digest a simple JSON data structure that changes infrequently. So infrequently that changing the data can mean just editing a plain text file and deploying your site again. + +Let's take a look at these scenarios and how you can get them working with Redwood. + +## Assumptions + +We assume you're deploying to Netlify in this recipe. Your mileage may vary for other providers or a custom build process. + +## Remove the /api directory + +Just delete the `/api` directory altogether and your app will still work in dev mode: + +```bash +rm -rf api +``` + +You can also run `yarn install` to cleanup those packages that aren't used any more. + +## Disable Prisma functionality +The `--prisma` and `--dm` flags are set to `true` by default and need to be set to `false` in the build command. + +```toml {4} +[build] + command = "yarn rw deploy netlify --prisma=false --dm=false" +``` + +While omitting these flags won't prevent you from developing the site in a local environment, not setting them to `false` will lead to a `'No Prisma Schema found'` error when you attempt to deploy your site to a production environment, at least when Netlify is the deployment target. + +## Turn off the API build process + +When it comes time to deploy, we need to let Netlify know that it shouldn't bother trying to look for any code to turn into AWS Lambda functions. + +Open up `netlify.toml`. We're going to comment out one line: + +```toml {4} +[build] + command = "yarn rw deploy netlify --prisma=false --dm=false" + publish = "web/dist" + # functions = "api/dist/functions" + +[dev] + command = "yarn rw dev" + +[[redirects]] + from = "/*" + to = "/index.html" + status = 200 +``` + +If you just have a static site that doesn't need any data access at all (even our simple JSON file discussed above) then you're done! Keep reading to see how you can access a local data store that we'll deploy along with the web side of our app. + +## Local JSON Fetch + +Let's display a graph of the weather forecast for the week of Jan 30, 2017 in Moscow, Russia. If this seems like a strangely specific scenario it's because that's the example data we can quickly get from the [OpenWeather API](https://openweathermap.org/forecast16). 
Get the JSON data [here](https://samples.openweathermap.org/data/2.5/forecast/daily?id=524901&appid=b1b15e88fa797225412429c1c50c122a1) or copy the following and save it to a file at `web/public/forecast.json`: + +```json +{ + "cod": "200", + "message": 0, + "city": { + "geoname_id": 524901, + "name": "Moscow", + "lat": 55.7522, + "lon": 37.6156, + "country": "RU", + "iso2": "RU", + "type": "city", + "population": 0 + }, + "cnt": 7, + "list": [ + { + "dt": 1485766800, + "temp": { + "day": 262.65, + "min": 261.41, + "max": 262.65, + "night": 261.41, + "eve": 262.65, + "morn": 262.65 + }, + "pressure": 1024.53, + "humidity": 76, + "weather": [ + { + "id": 800, + "main": "Clear", + "description": "sky is clear", + "icon": "01d" + } + ], + "speed": 4.57, + "deg": 225, + "clouds": 0, + "snow": 0.01 + }, + { + "dt": 1485853200, + "temp": { + "day": 262.31, + "min": 260.98, + "max": 265.44, + "night": 265.44, + "eve": 264.18, + "morn": 261.46 + }, + "pressure": 1018.1, + "humidity": 91, + "weather": [ + { + "id": 600, + "main": "Snow", + "description": "light snow", + "icon": "13d" + } + ], + "speed": 4.1, + "deg": 249, + "clouds": 88, + "snow": 1.44 + }, + { + "dt": 1485939600, + "temp": { + "day": 270.27, + "min": 266.9, + "max": 270.59, + "night": 268.06, + "eve": 269.66, + "morn": 266.9 + }, + "pressure": 1010.85, + "humidity": 92, + "weather": [ + { + "id": 600, + "main": "Snow", + "description": "light snow", + "icon": "13d" + } + ], + "speed": 4.53, + "deg": 298, + "clouds": 64, + "snow": 0.92 + }, + { + "dt": 1486026000, + "temp": { + "day": 263.46, + "min": 255.19, + "max": 264.02, + "night": 255.59, + "eve": 259.68, + "morn": 263.38 + }, + "pressure": 1019.32, + "humidity": 84, + "weather": [ + { + "id": 800, + "main": "Clear", + "description": "sky is clear", + "icon": "01d" + } + ], + "speed": 3.06, + "deg": 344, + "clouds": 0 + }, + { + "dt": 1486112400, + "temp": { + "day": 265.69, + "min": 256.55, + "max": 266, + "night": 256.55, + "eve": 260.09, + "morn": 266 + }, + "pressure": 1012.2, + "humidity": 0, + "weather": [ + { + "id": 600, + "main": "Snow", + "description": "light snow", + "icon": "13d" + } + ], + "speed": 7.35, + "deg": 24, + "clouds": 45, + "snow": 0.21 + }, + { + "dt": 1486198800, + "temp": { + "day": 259.95, + "min": 254.73, + "max": 259.95, + "night": 257.13, + "eve": 254.73, + "morn": 257.02 + }, + "pressure": 1029.5, + "humidity": 0, + "weather": [ + { + "id": 800, + "main": "Clear", + "description": "sky is clear", + "icon": "01d" + } + ], + "speed": 2.6, + "deg": 331, + "clouds": 29 + }, + { + "dt": 1486285200, + "temp": { + "day": 263.13, + "min": 259.11, + "max": 263.13, + "night": 262.01, + "eve": 261.32, + "morn": 259.11 + }, + "pressure": 1023.21, + "humidity": 0, + "weather": [ + { + "id": 600, + "main": "Snow", + "description": "light snow", + "icon": "13d" + } + ], + "speed": 5.33, + "deg": 234, + "clouds": 46, + "snow": 0.04 + } + ] +} +``` + +Any files that you put in `web/public` will be served by Netlify, skipping any build process. + +Next let's have a React component get that data remotely and then display it on a page. 
For this example we'll generate a homepage: + +```bash +yarn rw generate page home / +``` + +Next we'll use the browser's builtin `fetch()` function to get the data and then we'll just dump it to the screen to make sure it works: + +```jsx +import { useState, useEffect } from 'react' + +const HomePage = () => { + const [forecast, setForecast] = useState({}) + + useEffect(() => { + fetch('/forecast.json') + .then((response) => response.json()) + .then((json) => setForecast(json)) + }, []) + + return
{JSON.stringify(forecast)}
+} + +export default HomePage +``` + +We use `useState` to keep track of the forecast data and `useEffect` to actually trigger the loading of the data when the component mounts. Now we just need a graph! Let's add [chart.js](https://www.chartjs.org/) for some simple graphing: + +```bash +yarn workspace web add chart.js +``` + +Let's generate a sample graph: + +```jsx {1,2,5,15-32,34} +import { useState, useEffect, useRef } from 'react' +import Chart from 'chart.js' + +const HomePage = () => { + const chartRef = useRef() + + const [forecast, setForecast] = useState({}) + + useEffect(() => { + fetch('/forecast.json') + .then((response) => response.json()) + .then((json) => setForecast(json)) + }, []) + + useEffect(() => { + new Chart(chartRef.current.getContext('2d'), { + type: 'line', + data: { + labels: ['Jan', 'Feb', 'March'], + datasets: [ + { + label: 'High', + data: [86, 67, 91], + }, + { + label: 'Low', + data: [45, 43, 55], + }, + ], + }, + }) + }, [forecast]) + + return +} + +export default HomePage +``` + +![image](https://user-images.githubusercontent.com/300/80657460-7beaab80-8a38-11ea-886d-17040ef8573c.png) + +If that looks good then all that's left is to transform the weather data JSON into the format that Chart.js wants. Here's the final `HomePage` including a couple of functions to transform our data and display the dates properly: + +```jsx +import { useState, useEffect, useRef } from 'react' +import Chart from 'chart.js' + +const MONTHS = [ + 'Jan', + 'Feb', + 'Mar', + 'Apr', + 'May', + 'Jun', + 'Jul', + 'Aug', + 'Sep', + 'Oct', + 'Nov', + 'Dec', +] + +const getDates = (forecast) => { + return forecast.list.map((entry) => { + const date = new Date(0) + date.setUTCSeconds(entry.dt) + return `${MONTHS[date.getMonth()]} ${date.getDate()}` + }) +} + +const getTemps = (forecast) => { + return [ + { + label: 'High', + data: forecast.list.map((entry) => kelvinToFahrenheit(entry.temp.max)), + borderColor: 'red', + backgroundColor: 'transparent', + }, + { + label: 'Low', + data: forecast.list.map((entry) => kelvinToFahrenheit(entry.temp.min)), + borderColor: 'blue', + backgroundColor: 'transparent', + }, + ] +} + +const kelvinToFahrenheit = (temp) => { + return ((temp - 273.15) * 9) / 5 + 32 +} + +const HomePage = () => { + const chartRef = useRef() + + const [forecast, setForecast] = useState(null) + + useEffect(() => { + fetch('/forecast.json') + .then((response) => response.json()) + .then((json) => setForecast(json)) + }, []) + + useEffect(() => { + if (forecast) { + new Chart(chartRef.current.getContext('2d'), { + type: 'line', + data: { + labels: getDates(forecast), + datasets: getTemps(forecast), + }, + }) + } + }, [forecast]) + + return +} + +export default HomePage +``` + +If you got all of that right then you should see: + +![Chart screenshot](https://user-images.githubusercontent.com/300/80656934-32e62780-8a37-11ea-963e-0b227d7fe1df.png) + +All that's left is to deploy it to the world! + +## Wrapping Up + +Although we think Redwood will make app developers' lives easier when they need to talk to a database or third party API, it can be used with static sites and even hybrid sites like this when you want to digest and display data, but from a static file at your own URL. 
diff --git a/docs/versioned_docs/version-7.0/how-to/file-uploads.md b/docs/versioned_docs/version-7.0/how-to/file-uploads.md new file mode 100644 index 000000000000..bfdce858512d --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/file-uploads.md @@ -0,0 +1,508 @@ +# File Uploads + +As you've probably heard, Redwood thinks the future is serverless. This concept introduces some interesting problems you might not have had to worry about in the past. For example, where do files go when you upload them? There's no server! Like many tasks you may have done [yourself](tutorial/chapter4/authentication.md) in the past, this is another job that we can farm out to a third-party service. + +## The Service + +There are many services out there that handle uploading files and serving them from a CDN. Two of the big ones are [Cloudinary](https://cloudinary.com) and [Filestack](https://filestack.com). We're going to demo a Filestack integration here because we've found it easy to integrate. In addition to storing your uploads and making them available via a CDN, they also offer on-the-fly image transformations so that even if someone uploads a Retina-ready 5000px wide headshot, you can shrink it down and only serve a 100px version for their avatar in the upper right corner of your site. You save bandwidth and transfer costs. + +We're going to sign up for a free plan which gives us 100 uploads a month, 1000 transformations (like resizing an image), 1GB of bandwidth, and 0.5GB of storage. That's more than enough for this demo. (And maybe even a low-traffic production site!) + +Head over to https://dev.filestack.com/signup/free/ and sign up. Be sure to use a real email address because they're going to send you a confirmation email before they let you log in. Once you verify your email, you'll be dropped on your dashboard where your API key will be shown in the upper right: + +![New image scaffold](https://user-images.githubusercontent.com/300/82616735-ec41a400-9b82-11ea-9566-f96089e35e52.png) + +Copy that (or at least keep the tab open) because we're going to need it in a minute. (I already changed that key so don't bother trying to steal it!) + +That's it on the Filestack side; on to the application. + +## The App + +Let's create a very simple DAM (Digital Asset Manager) that lets users upload and catalogue images. They'll be able to click the thumbnail to open a full-size version. + +Create a new Redwood app: + +```bash +yarn create redwood-app uploader +cd uploader +``` + +The first thing we'll do is create an environment variable to hold our Filestack API key. This is a best practice so that the key isn't living in our repository for prying eyes to see. Add the key to the `.env` file in the root of our app: + +```bash +REDWOOD_ENV_FILESTACK_API_KEY=AM18i8xV4QpoiGwetoTWd +``` + +> We're prefixing with `REDWOOD_ENV_` here to tell Vite that we want it to replace this variables with its actual value as it's processing pages and statically generating them. Otherwise our generated pages would still contain something like `process.env.FILESTACK_API_KEY`, which wouldn't exist when the pages are static and being served from a CDN. 
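+
+For reference, here's a minimal sketch of how web-side code can read that prefixed variable once Vite has inlined it (nothing Filestack-specific yet, just plain env-var access):
+
+```jsx
+// Minimal sketch: because the variable is prefixed with REDWOOD_ENV_, Vite
+// inlines its value into the web bundle, so it can be read directly here.
+const apiKey = process.env.REDWOOD_ENV_FILESTACK_API_KEY
+
+if (!apiKey) {
+  console.warn('REDWOOD_ENV_FILESTACK_API_KEY is not set; check your .env file')
+}
+```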
+ +Now we can start our development server: + +```bash +yarn rw dev +``` + +### The Database + +We'll create a single model to store our image data: + +```javascript title="api/db/schema.prisma" +model Image { + id Int @id @default(autoincrement()) + title String + url String +} +``` + +`title` will be the user-supplied name for this asset and `url` will contain the public URL that Filestack creates after an upload. + +Create a migration to update the database; when prompted, name it "add image": + +```bash +yarn rw prisma migrate dev +``` + +To make our lives easier, let's scaffold the screens necessary to create/update/delete an image, then we'll worry about adding the uploader: + +```bash +yarn rw generate scaffold image +``` + +Now head to http://localhost:8910/images/new and let's figure this out! + +![New image scaffold](https://user-images.githubusercontent.com/300/82694608-653f0b00-9c18-11ea-8003-4dc4aeac7b86.png) + +## The Uploader + +Filestack has a couple of [React components](https://github.com/filestack/filestack-react) that handle all the uploading for us. Let's add the package: + +```bash +yarn workspace web add filestack-react +``` + +We want the uploader on our scaffolded form, so let's head over to `ImageForm`, import Filestack's inline picker, and try replacing the **Url** input with it: + +```jsx {9,49} title="web/src/components/ImageForm/ImageForm.js" +import { + Form, + FormError, + FieldError, + Label, + TextField, + Submit, +} from '@redwoodjs/forms' +import { PickerInline } from 'filestack-react' + +const formatDatetime = (value) => { + if (value) { + return value.replace(/:\d{2}\.\d{3}\w/, '') + } +} + +const ImageForm = (props) => { + const onSubmit = (data) => { + props.onSave(data, props?.image?.id) + } + + return ( +
+
+ + + + + + + + + +
+ + Save + +
+ +
+ ) +} + +export default ImageForm +``` + +We now have a picker with all kinds of options, like picking a local file, providing a URL, and even grabbing a file from Facebook, Instagram, or Google Drive. Not bad! + +![Filestack picker](https://user-images.githubusercontent.com/32992335/133859676-4086a4b9-8112-4a19-a4fe-5663388aafc0.png) + +You can even try uploading an image to make sure it works: + +![Upload](https://user-images.githubusercontent.com/300/82618035-bb636e00-9b86-11ea-9401-61b8c989f43c.png) + +> Make sure you click the **Upload** button that appears after picking your file. + +If you go over to the Filestack dashboard, you'll see that we've uploaded an image: + +![Filestack dashboard](https://user-images.githubusercontent.com/300/82618057-ccac7a80-9b86-11ea-9cd8-7a9e80a5a20f.png) + +But that doesn't help us attach anything to our database record. Let's do that. + +## The Data + +Let's see what's going on when an upload completes. The Filestack picker takes an `onSuccess` prop with a function to call when complete: + +```jsx {8-10,16} title="web/src/components/ImageForm/ImageForm.js" +// imports and stuff... + +const ImageForm = (props) => { + const onSubmit = (data) => { + props.onSave(data, props?.image?.id) + } + + const onFileUpload = (response) => { + console.info(response) + } + + // form stuff... + + +``` + +Well lookie here: + +![Uploader response](https://user-images.githubusercontent.com/300/82618071-ddf58700-9b86-11ea-9626-e093b4c8d853.png) + +`filesUploaded[0].url` seems to be exactly what we need—the public URL to the image that was just uploaded. Excellent! How about we use a little state to track that for us so it's available when we submit our form: + +```jsx {10,19,26} title="web/src/components/ImageForm/ImageForm.js" +import { + Form, + FormError, + FieldError, + Label, + TextField, + Submit, +} from '@redwoodjs/forms' +import { PickerInline } from 'filestack-react' +import { useState } from 'react' + +const formatDatetime = (value) => { + if (value) { + return value.replace(/:\d{2}\.\d{3}\w/, '') + } +} + +const ImageForm = (props) => { + const [url, setUrl] = useState(props?.image?.url) + + const onSubmit = (data) => { + props.onSave(data, props?.image?.id) + } + + const onFileUpload = (response) => { + setUrl(response.filesUploaded[0].url) + } + + return ( + // component stuff... +``` + +So we'll use `setState` to store the URL for the image. We default it to the existing `url` value, if it exists—remember that scaffolds use this same form for editing of existing records, where we'll already have a value for `url`. If we didn't store that url value somewhere then it would be overridden with `null` if we started editing an existing record! + +The last thing we need to do is set the value of `url` in the `data` object before it gets passed to the `onSave` handler: + +```jsx {2,3} title="web/src/components/ImageForm/ImageForm.js" +const onSubmit = (data) => { + const dataWithUrl = Object.assign(data, { url }) + props.onSave(dataWithUrl, props?.image?.id) +} +``` + +Now try uploading a file and saving the form: + +![Upload done](https://user-images.githubusercontent.com/300/82702493-f5844c80-9c26-11ea-8fc4-0273b92034e4.png) + +It worked! 
Next let's update the display here to actually show the image as a thumbnail and make it clickable to see the full version: + +```jsx {76-78} title="web/src/components/Images/Images.js" +import { useMutation } from '@redwoodjs/web' +import { toast } from '@redwoodjs/web/toast' +import { Link, routes } from '@redwoodjs/router' + +import { QUERY } from 'src/components/Image/ImagesCell' + +const DELETE_IMAGE_MUTATION = gql` + mutation DeleteImageMutation($id: Int!) { + deleteImage(id: $id) { + id + } + } +` + +const MAX_STRING_LENGTH = 150 + +const truncate = (text) => { + let output = text + if (text && text.length > MAX_STRING_LENGTH) { + output = output.substring(0, MAX_STRING_LENGTH) + '...' + } + return output +} + +const jsonTruncate = (obj) => { + return truncate(JSON.stringify(obj, null, 2)) +} + +const timeTag = (datetime) => { + return ( + + ) +} + +const checkboxInputTag = (checked) => { + return +} + +const ImagesList = ({ images }) => { + const [deleteImage] = useMutation(DELETE_IMAGE_MUTATION, { + onCompleted: () => { + toast.success('Image deleted') + }, + // This refetches the query on the list page. Read more about other ways to + // update the cache over here: + // https://www.apollographql.com/docs/react/data/mutations/#making-all-other-cache-updates + refetchQueries: [{ query: QUERY }], + awaitRefetchQueries: true, + }) + + const onDeleteClick = (id) => { + if (confirm('Are you sure you want to delete image ' + id + '?')) { + deleteImage({ variables: { id } }) + } + } + + return ( +
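+        {/* Each image row below renders a small clickable thumbnail that opens the full-size file */}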
+ + + + + + + + + + + {images.map((image) => ( + + + + + + + ))} + +
IdTitleUrl 
{truncate(image.id)}{truncate(image.title)} + + + + + +
+
+ ) +} + +export default ImagesList +``` + +![Image](https://user-images.githubusercontent.com/300/82702575-1fd60a00-9c27-11ea-8d2f-047bcf4e9cae.png) + +## The Transform + +Remember when we mentioned that Filestack can save you bandwidth by transforming images on the fly? This page is a perfect example—the image is never bigger than 50px, why pull down the full resolution just for that tiny display? Here's how we can tell Filestack that whenever we grab this instance of the image, it only needs to be 100px. + +Why 100px? Most phones and many laptops and desktop displays are now 4k or larger. Images are actually displayed at at least double resolution on these displays, so even though it's "50px", it's really 100px when shown on these displays. So you'll usually want to bring down all images at twice their intended display resolution. + +We need to add a special indicator to the URL itself to trigger the transform so let's add a function that does that for a given image URL (this can go either inside or outside of the component definition): + +```jsx title="web/src/components/Images/Images.js" +const thumbnail = (url) => { + const parts = url.split('/') + parts.splice(3, 0, 'resize=width:100') + return parts.join('/') +} +``` + +What this does is turn a URL like + +``` +https://cdn.filestackcontent.com/81m7qIrURxSp7WHcft9a +``` + +into + +``` +https://cdn.filestackcontent.com/resize=width:100/81m7qIrURxSp7WHcft9a +``` + +Now we'll use the result of that function in the `` tag: + +```jsx title="web/src/components/Images/Images.js" + +``` + +Starting with an uploaded image of 157kB, the 100px thumbnail clocks in at only 6.5kB! Optimizing image delivery is almost always worth the extra effort! + +You can read more about the available transforms at [Filestack's API reference](https://www.filestack.com/docs/api/processing/). + +## The Improvements + +It'd be nice if, after uploading, you could see the image you uploaded. Likewise, when editing an image, it'd be helpful to see what's already attached. Let's make those improvements now. + +We're already storing the attached image URL in state, so let's use the existence of that state to show the attached image. In fact, let's also hide the uploader and assume you're done (you'll be able to show it again if needed): + +```jsx {5,8} title="web/src/components/ImageForm/ImageForm.js" + +
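+      {/* Once url is set (after a successful upload) we hide the picker and show the uploaded image instead */}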
+
+ +{url && } +``` + +Now if you create a new image record, you'll see the picker, and as soon as the upload is complete, the uploaded image will pop into place. If you go to edit an image, you'll see the file that's already attached. + +> You should probably use the same resize-URL trick here to make sure it doesn't try to display a 10MB image immediately after uploading it. A max width of 500px may be good... + +Now let's add the ability to bring back the uploader if you decide you want to change the image. We can do that by clearing the image that's in state: + +```jsx {8-18} title="web/src/components/ImageForm/ImageForm.js" + +
+
+ +{url && ( +
+ + +
+)} +``` + +![Replace image button](https://user-images.githubusercontent.com/300/82719274-e7055780-9c5d-11ea-9a8a-8c1c72185983.png) + +We're borrowing the styles from the submit button and making sure that the image has both a top and bottom margin so it doesn't crash into the new button. + +## The Delete + +Having a free plan is great, but if you just load images and never actually remove the unnecessary ones, you'll be in trouble. + +To avoid this, we'd better implement the `deleteImage` mutation. It will enable you to make a call to the Filestack API to remove your resources and, on success, remove the row in the `Image` model. + +You are going to need a new ENV var called `REDWOOD_ENV_FILESTACK_SECRET`, which you can find in **Filestack > Security > Policy & Signature:** App Secret. Put this into your `.env` file (don't use this one of course, paste your own in there): + +```dotenv title=".env" +REDWOOD_ENV_FILESTACK_SECRET= PWRWGEKFZ2HJMXWSBP3YYI5ERZ +``` + +Filestack's library will provide a `getSecurity` method that will allow us to delete a resource, but only if executed on a **nodejs** environment. Hence, we need to execute the `delete` operation on the `api` side. + +Let's add the proper package: + +```shell +yarn workspace api add filestack-js +``` + +Great. Now we can modify our service accordingly: + +```js {4-23} title="api/src/services/image/image.ts" +import * as Filestack from 'filestack-js' + +export const deleteImage = async({ id }) => { + const client = Filestack.init(process.env.REDWOOD_ENV_FILESTACK_API_KEY) + + const image = await db.image.findUnique({ where: { id } }) + + // The `security.handle` is the unique part of the Filestack file's url. + const handle = image.url.split('/').pop() + + const security = Filestack.getSecurity( + { + // We set `expiry` at `now() + 5 minutes`. + expiry: new Date().getTime() + 5 * 60 * 1000, + handle, + call: ['remove'], + }, + process.env.REDWOOD_ENV_FILESTACK_SECRET + ) + + await client.remove(handle, security) + + return db.image.delete({ where: { id } } ) +} +``` + +Great! Now when you click the button in the frontend, the service will make a call to Filestack to remove the image from the service first. We set `expiry` to 20 seconds so that our policy expires 20 seconds after its generation, this is more than enough to protect your access while executing such operation. + +Assuming the request to `remove()` the image succeeded, we then delete it locally. If you wanted to be extra safe you could surround the `remove()` call with a try/catch block and then throw your own error if Filestack ends up throwing an error. + +## The Wrap-up + +Files uploaded! + +There's plenty of ways to integrate a file picker. This is just one, but we think it's simple, yet flexible. We use the same technique on the [example-blog](https://github.com/redwoodjs/example-blog). + +Have fun and get uploading! diff --git a/docs/versioned_docs/version-7.0/how-to/gotrue-auth.md b/docs/versioned_docs/version-7.0/how-to/gotrue-auth.md new file mode 100644 index 000000000000..c6cdbd40eb30 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/gotrue-auth.md @@ -0,0 +1,706 @@ +# GoTrue Auth + +If you've completed the [Authentication section](../tutorial/chapter4/authentication.md) of The Tutorial, you've seen how you can add the [Netlify Identity Widget](https://github.com/netlify/netlify-identity-widget) to your Redwood app in a matter of minutes. +But what do you do if you want to use Netlify Identity, but ditch the widget? 
There are many cases where we want much more control over our authentication interface and functionality, while still maintaining some _ease-of-use_ when it comes to development. + +Enter [GoTrue-JS](https://github.com/netlify/gotrue-js), a client library for interfacing with Netlify Identity's GoTrue API. + +In this recipe, we'll: + +- [configure Redwood Auth with GoTrue-JS](#generate-auth-configuration), +- [create a Sign Up form](#sign-up), +- [create a Sign In form](#sign-in), +- [create a Sign Out button](#sign-out), +- [add auth links](#auth-links) that display the correct buttons based on our auth state + +But first, some housekeeping... + +## Prerequisites + +Before getting started, there are a few steps you should have completed: + +- [Create a Redwood app](../tutorial/chapter1/installation.md) +- [Create a Netlify account](https://www.netlify.com/) +- [Deploy your Netlify site](../tutorial/chapter4/deployment.md) +- [Enable Netlify Identity](#enable-netlify-identity) +- Fire up a dev server: `yarn redwood dev` + +### Enable Netlify Identity + +Unless you've skipped the [requirements](#prerequisites) section (for shame!), you should already have a Netlify account and a site set up. If you'd be so kind, navigate to your site's **Dashboard**, head to the **Identity** tab, and click **Enable Identity**: + +![Netlify Identity screenshot](https://user-images.githubusercontent.com/300/82271191-f5850380-992b-11ea-8061-cb5f601fa50f.png) + +Now you should see an Identity API endpoint, e.g. `https://my-bodacious-app.netlify.app/.netlify/identity`. Copy and paste that somewhere—we'll need it in a moment when we instantiate GoTrue-JS. + +## Generate Auth Configuration + +Let's start by installing the required packages and generating boilerplate code and files for Redwood Auth, all with this simple [CLI command](../cli-commands.md#setup-auth): + +```bash +yarn redwood setup auth goTrue +``` + +By specifying `goTrue` as the provider, Redwood automatically added the necessary GoTrue-JS config to our App.js. Let's open up `web/src/App.js` and inspect. You should see: + +```jsx {1-2,11-14,18,22} title="web/src/App.js" +import { AuthProvider } from '@redwoodjs/auth' +import GoTrue from 'gotrue-js' +import { FatalErrorBoundary } from '@redwoodjs/web' +import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' + +import FatalErrorPage from 'src/pages/FatalErrorPage' +import Routes from 'src/Routes' + +import './index.css' + +const goTrueClient = new GoTrue({ + APIUrl: 'https://MYAPP.netlify.app/.netlify/identity', + setCookie: true, +}) + +const App = () => ( + + + + + + + +) + +export default App +``` + +Time to use that API endpoint we copied from the Netlify Identity page. Replace the value of `APIUrl` with your API endpoint. For example: + +```jsx {4} title="web/src/App.js" +// imports... + +const goTrueClient = new GoTrue({ + APIUrl: 'https://gotrue-recipe.netlify.app/.netlify/identity', + setCookie: true, +}) +``` + +That's all for configuration. Easy! + +## Sign Up + +Sign Up feels like an appropriate place to start building our interface. + +Our first iteration won't include features like Email Confirmation or Password Recovery. Those, among other features, will be covered in the Advanced Concepts section of this recipe (coming soon). + +To forego email confirmation, head back over to your site's **Netlify Dashboard**, open the **Identity** tab, and click **Settings and usage**. 
+ +![Netlify Identity Settings screenshot](https://user-images.githubusercontent.com/458233/86220685-ed86c900-bb51-11ea-9d74-f1ee4ab0a91b.png) + +In **Emails > Confirmation template**, click **Edit settings**, check **Allow users to sign up without verifying their email address**, and hit **Save**. + +![Netlify Identity Confirmation template](https://user-images.githubusercontent.com/458233/86221090-7140b580-bb52-11ea-8530-b1a7be937c56.png) + +Nicely done. Now, back to our app. + +**The Sign Up Page** + +Let's generate a Sign Up page: + +```bash +yarn redwood generate page Signup +``` + +This adds a Signup [route](../router.md#router-and-route) to our routes file and creates a SignupPage component. + +In the just-generated SignupPage component (`web/src/pages/SignupPage/SignupPage.js`), let's import some [Redwood Form components](../forms.md) and add a very basic form to our render component: + +```jsx title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' + +const SignupPage = () => { + return ( + <> +

Sign Up

+
+ + + Sign Up + + + ) +} + +export default SignupPage +``` + +Did I mention it was basic? If you want to add some polish, you might find both the [Redwood Form docs](https://5efa4336f1e71f00081df803--redwoodjs.netlify.app/docs/form) and the [tutorial section on forms](https://5efa4336f1e71f00081df803--redwoodjs.netlify.app/tutorial/everyone-s-favorite-thing-to-build-forms) quite useful. For our purposes, let's just focus on the functionality. + +Now that we have a form interface, we're going to want to do something when the user submits it. Let's add an `onSubmit` function to our component and pass it as a prop to our Form component: + +```jsx {4-6,11} title="web/src/pages/SignupPage/SignupPage.js" +// imports... + +const SignupPage = () => { + const onSubmit = (data) => { + // do something here + } + + return ( + <> +

Sign Up

+
+ + + Sign Up + + + ) +} +//... +``` + +The _something_ we need to do is—surprise!—sign up. To do this, we'll need a way to communicate with `` and the GoTrue-JS client we passed to it. Look no further than the [`useAuth` hook](https://redwoodjs.com/docs/authentication#api), which lets us subscribe to our auth state and its properties. In our case, we'll be glad to now have access to `client` and, thusly, our GoTrue-JS instance and [all of its functions](https://github.com/netlify/gotrue-js/blob/master/README.md#authentication-examples). + +Let's import `useAuth` and destructure `client` from it in our component: + +```jsx {2,5} title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SignupPage = () => { + const { client } = useAuth() + + const onSubmit = (data) => { + // do something here + } + + return ( + <> +

Sign Up

+
+ + + Sign Up + + + ) +} + +export default SignupPage +``` + +And now we'll attempt to create a new user in the `onSubmit` function with [`client.signup()`](https://github.com/netlify/gotrue-js/blob/master/README.md#create-a-new-user) by passing in the `email` and `password` values that we've captured from our form: + +```jsx {8-11} title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SignupPage = () => { + const { client } = useAuth() + + const onSubmit = (data) => { + client + .signup(data.email, data.password) + .then((res) => console.log(res)) + .catch((error) => console.log(error)) + } + + return ( + <> +

Sign Up

+
+ + + Sign Up + + + ) +} + +export default SignupPage +``` + +Presently, our sign up will work as is, but simply console-logging the response from `client.signup()` is hardly useful behavior. + +Let's display errors to the user if there is one. To do this, we'll set up `React.useState()` to manage our error state and conditionally render the error message if there is one. We'll also want to reset the error state at the beginning of every submission with `setError(null)`: + +```jsx {6,9,13,20} title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SignupPage = () => { + const { client } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + client + .signup(data.email, data.password) + .then((res) => console.log(res)) + .catch((error) => setError(error.message)) + } + + return ( + <> +

Sign Up

+
+ {error &&

{error}

} + + + Sign Up + + + ) +} + +export default SignupPage +``` + +Now we can handle a successful submission. Once a user has signed up, we should direct them to the sign in page that we'll be building out in the next section. + +Start by [generating](../cli-commands.md#generate-page) a sign in page: + +```bash +yarn redwood generate page Signin +``` + +Back in our `SignupPage`, let's import `routes` and `navigate` from [Redwood Router](../router.md#navigate) and use them to redirect on successful sign up: + +```jsx {3,13} title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' +import { routes, navigate } from '@redwoodjs/router' + +const SignupPage = () => { + const { client } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + client + .signup(data.email, data.password) + .then(() => navigate(routes.signin())) + .catch((error) => setError(error.message)) + } + + return ( + <> +

Sign Up

+
+ {error &&

{error}

} + + + Sign Up + + + ) +} + +export default SignupPage +``` + +Hoorah! We've just added a sign up page and created a sign up form. We created a function to sign up users and we redirect users to the sign up page upon successful submission. Let's move on to Sign In. + +## Sign In + +Let's get right to it. In the SigninPage we generated in the last section, let's add a basic form with `email` and `password` fields, some error reporting setup, and a hollow `onSubmit` function: + +```jsx title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' + +const SigninPage = () => { + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + // do sign in here + } + + return ( + <> +

Sign In

+
+ {error &&

{error}

} + + + Sign In + + + ) +} + +export default SigninPage +``` + +Then we'll need to import `useAuth` from `@redwoodjs/auth` and destructure `logIn` so that we can use it in our `onSubmit` function: + +```jsx {2,5} title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SigninPage = () => { + const { logIn } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + // do sign in here + } + + return ( + <> +

Sign In

+
+ {error &&

{error}

} + + + Sign In + + + ) +} + +export default SigninPage +``` + +Now we'll add `logIn` to our `onSubmit` function. This time we'll be passing an object to our function as we're using Redwood Auth's logIn function directly (as opposed to `client`). This object takes an email, password, and a remember boolean. We'll also chain on `then` and `catch` to handle the response: + +```jsx {10-14} title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SigninPage = () => { + const { logIn } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + logIn({ email: data.email, password: data.password, remember: true }) + .then(() => { + // do something + }) + .catch((error) => setError(error.message)) + } + + return ( + <> +

Sign In

+
+ {error &&

{error}

} + + + Sign In + + + ) +} + +export default SigninPage +``` + +Now then, upon a successful login let's redirect our user back to the home page. First, [generate](../cli-commands.md#generate-page) a homepage (if you haven't already): + +```bash +yarn redwood generate page Home / +``` + +In our `SigninPage`, import `navigate` and `routes` from [`@redwoodjs/router`](../router.md) and add them to the `then` function: + +```jsx {3,12} title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' +import { navigate, routes } from '@redwoodjs/router' + +const SigninPage = () => { + const { logIn } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + logIn({ email: data.email, password: data.password, remember: true }) + .then(() => navigate(routes.home())) + .catch((error) => setError(error.message)) + } + + return ( + <> +

Sign In

+
+ {error &&

{error}

} + + + Sign In + + + ) +} + +export default SigninPage +``` + +Well done! We've created a sign in page and form and we successfully handle sign in. Next up... + +## Sign Out + +Sign out is by far the easiest auth functionality to implement: all we need to do is fire off useAuth's `logOut` method. + +Let's start by [generating a component](../cli-commands.md#generate-component) to house our Sign Out Button: + +```bash +yarn redwood generate component SignoutBtn +``` + +In the `web/src/components/SignoutBtn/SignoutBtn.js` file we just generated, let's render a button and add a click handler: + +```jsx title="web/src/components/SignoutBtn/SignoutBtn.js" +const SignoutBtn = () => { + const onClick = () => { + // do sign out here. + } + return +} + +export default SignoutBtn +``` + +Now we can import [`useAuth` from `@redwoodjs/auth`](../authentication.md#api). We'll destructure its `logOut` method and invoke it in the `onClick` function: + +```jsx {1,4,7} title="web/src/components/SignoutBtn/SignoutBtn.js" +import { useAuth } from '@redwoodjs/auth' + +const SignoutBtn = () => { + const { logOut } = useAuth() + + const onClick = () => { + logOut() + } + + return +} + +export default SignoutBtn +``` + +This works as is, but, because the user may be in a private area of your app when the Sign Out button is clicked, we should make sure we also navigate the user away from this page: + +```jsx {2,8} title="web/src/components/SignoutBtn/SignoutBtn.js" +import { useAuth } from '@redwoodjs/auth' +import { navigate, routes } from '@redwoodjs/router' + +const SignoutBtn = () => { + const { logOut } = useAuth() + + const onClick = () => { + logOut().then(() => navigate(routes.home())) + } + + return +} + +export default SignoutBtn +``` + +And that's it for Sign Out! Err, of course, we're not rendering it anywhere in our app yet. In the next section, well add some navigation that conditionally renders the appropriate sign up, sign in, and sign out buttons based on our authentication state. + +## Auth Links + +Here we'll implement some auth-related navigation that conditionally renders the correct links and buttons based on the user's authentication state. + +- When the user is not logged in, we should see **Sign Up** and **Sign In**. +- When the user is logged in, we should see **Log Out**. + +Let's start by [generating a navigation component](../cli-commands.md#generate-component): + +```bash +yarn redwood generate component Navigation +``` + +This creates `web/src/components/Navigation/Navigation.js`. In that file, let's import [the `Link` component and the `routes` object](../router.md#link-and-named-route-functions) from `@redwoodjs/router`. 
+ +We'll also import [`useAuth`](../authentication.md#api) since we'll need to subscribe to the auth state in order for our components to decide what to render: + +```jsx title="web/src/components/Navigation/Navigation.js" +import { Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' + +const Navigation = () => { + return +} + +export default Navigation +``` + +Let's destructure [`isAuthenticated` from the `useAuth`](../authentication.md#api) API and apply it to some conditionals in the render method: + +```jsx {5,8-12} title="web/src/components/Navigation/Navigation.js" +import { Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' + +const Navigation = () => { + const { isAuthenticated } = useAuth() + return ( + + ) +} + +export default Navigation +``` + +Because Redwood Auth uses [React's Context API](https://reactjs.org/docs/context.html) to manage and broadcast the auth state, we can be confident that `isAuthenticated` will always be up-to-date, even if it changes from within another component in the tree (so long as it's a child of ``). In our case, when `isAuthenticated` changes, React will auto-magically take care of rendering the appropriate components. + +So, now let's import our sign out button and add it, as well as sign in and sign up links, to the appropriate blocks in the conditional: + +```jsx {3,9-16} title="web/src/components/Navigation/Navigation.js" +import { Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' +import SignoutBtn from 'src/components/SignoutBtn/SignoutBtn' + +const Navigation = () => { + const { isAuthenticated } = useAuth() + return ( + + ) +} + +export default Navigation +``` + +We have a working navigation component, but we still need to render it somewhere. Let's [generate a layout](../cli-commands.md#generate-layout) called GlobalLayout: + +```bash +yarn redwood generate layout Global +``` + +Then import and render the navigation component in the newly generated `web/src/layouts/GlobalLayout/GlobalLayout.js`: + +```jsx title="web/src/layouts/GlobalLayout/GlobalLayout.js" +import Navigation from 'src/components/Navigation/Navigation' + +const GlobalLayout = ({ children }) => { + return ( + <> +
+ +
+
{children}
+ + ) +} + +export default GlobalLayout +``` + +Finally, we'll import and wrap each of our generated pages in this GlobalLayout component: + +**Home** + +```jsx title="web/src/pages/HomePage/Homepage.js" +import GlobalLayout from 'src/layouts/GlobalLayout/GlobalLayout' + +const HomePage = () => { + return ( + +

Home

+

My Gotrue Redwood Auth

+
+ ) +} + +export default HomePage +``` + +**Sign Up** + +```jsx title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' +import { routes, navigate } from '@redwoodjs/router' + +import GlobalLayout from 'src/layouts/GlobalLayout/GlobalLayout' + +const SignupPage = () => { + const { client } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + client + .signup(data.email, data.password) + .then(() => navigate(routes.signin())) + .catch((error) => setError(error.message)) + } + + return ( + +

Sign Up

+
+ {error &&

{error}

} + + + Sign Up + +
+ ) +} + +export default SignupPage +``` + +**Sign In** + +```jsx title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' +import { navigate, routes } from '@redwoodjs/router' + +import GlobalLayout from 'src/layouts/GlobalLayout/GlobalLayout' + +const SigninPage = () => { + const { logIn } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + logIn({ email: data.email, password: data.password, remember: true }) + .then(() => navigate(routes.home())) + .catch((error) => setError(error.message)) + } + + return ( + +

Sign In

+
+ {error &&

{error}

} + + + Sign In + +
+ ) +} + +export default SigninPage +``` + +Now we have navigation that renders the correct links and buttons based on our auth state. When the user signs in, they'll see a **Sign Out** button. When the user signs out, they'll see **Sign Up** and **Sign In** links. + +## Wrapping Up + +We've configured GoTrue with Redwood Auth, created a Sign Up page, a Sign In page, a Sign Out button, and added auth links to our layout. Nicely done! + +Thanks for tuning in! + +> If you spot an error or have trouble completing any part of this recipe, please feel free to open an issue on [Github](https://github.com/redwoodjs/redwood) or create a topic on our [community forum](https://community.redwoodjs.com/). diff --git a/docs/versioned_docs/version-7.0/how-to/mocking-graphql-in-storybook.md b/docs/versioned_docs/version-7.0/how-to/mocking-graphql-in-storybook.md new file mode 100644 index 000000000000..b6353e087a95 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/mocking-graphql-in-storybook.md @@ -0,0 +1,114 @@ +# Mocking GraphQL in Storybook + +## Pre-requisites + +1. Storybook should be running, start it by running `yarn rw storybook` +2. Have a Cell, Query, or Mutation that you would like to mock + +## Where to put mock-requests + +1. Mock-requests placed in a file ending with `.mock.js|ts` are automatically imported and become globally scoped, which means that they will be available in all of your stories. +2. Mock-requests in a story will be locally scoped and will overwrite globally scoped mocks. + +## Mocking a Cell's Query + +Locate the file ending with `.mock.js` in your Cell's folder. This file exports a value named `standard`, which is the mock-data that will be returned for your Cell's `QUERY`. +```jsx {3,4,5,11,12,13} title="UserProfileCell/UserProfileCell.js" +export const QUERY = gql` + query UserProfileQuery { + userProfile { + id + } + } +` + +// UserProfileCell/UserProfileCell.mock.js +export const standard = { + userProfile: { + id: 42 + } +} +``` + +The value assigned to `standard` is the mock-data associated to the `QUERY`, so modifying the `QUERY` means you need to modify the mock-data. +```diff title="UserProfileCell/UserProfileCell.js" +export const QUERY = gql` + query UserProfileQuery { + userProfile { + id ++ name + } + } +` + +// UserProfileCell/UserProfileCell.mock.js +export const standard = { + userProfile: { + id: 42, ++ name: 'peterp', + } +} +``` + +> Behind the scenes: Redwood uses the value associated to `standard` as the second argument to `mockGraphQLQuery`. + +### GraphQL request variables + +If you want to dynamically modify mock-data based on a queries variables the `standard` export can also be a function, and the first parameter will be an object containing the variables: +```jsx {1,6} title="UserProfileCell/UserProfileCell.mock.js" +export const standard = (variables) => { + return { + userProfile: { + id: 42, + name: 'peterp', + profileImage: `https://example.com/profile.png?size=${variables.size}` + } + } +} +``` + +## Mocking a GraphQL Query + +If you're not using a Cell, or if you want to overwrite a globally scoped mock, you can use `mockGraphQLQuery`: + +```jsx title="Header/Header.stories.js" +export const withReallyLongName = () => { + mockGraphQLQuery('UserProfileQuery', () => { + return { + userProfile: { + id: 99, + name: 'Hubert Blaine Wolfeschlegelsteinhausenbergerdorff Sr.' + } + } + }) + return
+} +``` + +## Mocking a GraphQL Mutation + +Use `mockGraphQLMutation`: + +```jsx title="UserProfileCell/UserProfileCell.mock.js" +export const standard = /* ... */ + +mockGraphQLMutation('UpdateUserName', ({ name }) => { + return { + userProfile: { + id: 99, + name, + } + } +}) +``` + +## Mock-requests that intentionally produce errors + +`mockGraphQLQuery` and `mockGraphQLMutation` have access to `ctx` which allows you to modify the mock-response: + +```jsx +mockGraphQLQuery('UserProfileQuery', (_vars, { ctx }) => { + // Forbidden + ctx.status(403) +}) +``` diff --git a/docs/versioned_docs/version-6.0/how-to/oauth.md b/docs/versioned_docs/version-7.0/how-to/oauth.md similarity index 99% rename from docs/versioned_docs/version-6.0/how-to/oauth.md rename to docs/versioned_docs/version-7.0/how-to/oauth.md index 7b7e6083985a..4b65257ef5cf 100644 --- a/docs/versioned_docs/version-6.0/how-to/oauth.md +++ b/docs/versioned_docs/version-7.0/how-to/oauth.md @@ -133,7 +133,7 @@ Go ahead and click it, and you should be taken to GitHub to authorize your GitHu ![GitHub Oauth Access Page](https://user-images.githubusercontent.com/300/245899872-8ddd7e69-dbfa-4544-ab6f-78fd4ff02da8.png) -:::caution +:::warning If you get an error here that says "The redirect_uri MUST match the registered callback URL for this application" verify that the redirect URL you entered on GitHub and the one you put into the `GITHUB_OAUTH_REDIRECT_URL` ENV var are identical! diff --git a/docs/versioned_docs/version-7.0/how-to/pagination.md b/docs/versioned_docs/version-7.0/how-to/pagination.md new file mode 100644 index 000000000000..d76f75a9b0ff --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/pagination.md @@ -0,0 +1,165 @@ +# Pagination + +This tutorial will show you one way to implement pagination in an app built using RedwoodJS. It builds on top of [the tutorial](../tutorial/foreword.md) and I'll assume you have a folder with the code from the tutorial that you can continue working on. (If you don't, you can clone this repo: https://github.com/thedavidprice/redwood-tutorial-test) + +![redwoodjs-pagination](https://user-images.githubusercontent.com/30793/94778130-ec6d6e00-03c4-11eb-9fd0-97cbcdf68ec2.png) + +The screenshot above shows what we're building. See the pagination at the bottom? The styling is up to you to fix. + +So you have a blog, and probably only a few short posts. But as the blog grows bigger you'll soon need to paginate all your posts. So, go ahead and create a bunch of posts to make this pagination worthwhile. We'll display five posts per page, so begin with creating at least six posts, to get two pages. + +We'll begin by updating the SDL. To our `Query` type a new query is added to get just a single page of posts. We'll pass in the page we want, and when returning the result we'll also include the total number of posts as that'll be needed when building our pagination component. + +```javascript title="api/src/graphql/posts.sdl.js" +export const schema = gql` + # ... + + type PostPage { + posts: [Post!]! + count: Int! + } + + type Query { + postPage(page: Int): PostPage + posts: [Post!]! + post(id: Int!): Post! + } + + # ... + ` +``` + +You might have noticed that we made the page optional. That's because we want to be able to default to the first page if no page is provided. + +Now we need to add a resolver for this new query to our posts service. 
+```javascript title="api/src/services/posts/posts.js" +const POSTS_PER_PAGE = 5 + +export const postPage = ({ page = 1 }) => { + const offset = (page - 1) * POSTS_PER_PAGE + + return { + posts: db.post.findMany({ + take: POSTS_PER_PAGE, + skip: offset, + orderBy: { createdAt: 'desc' }, + }), + count: db.post.count(), + } +} +``` + +So now we can make a GraphQL request (using [Apollo](https://www.apollographql.com/)) for a specific page of our blog posts. And the resolver we just updated will use [Prisma](https://www.prisma.io/) to fetch the correct posts from our database. + +With these updates to the API side of things done, it's time to move over to the web side. It's the BlogPostsCell component that makes the gql query to display the list of blog posts on the HomePage of the blog, so let's update that query. + +```jsx title="web/src/components/BlogPostsCell/BlogPostsCell.js" +export const QUERY = gql` + query BlogPostsQuery($page: Int) { + postPage(page: $page) { + posts { + id + title + body + createdAt + } + count + } + } +` +``` + +The `Success` component in the same file also needs a bit of an update to handle the new gql query result structure. + +```jsx title="web/src/components/BlogPostsCell/BlogPostsCell.js" +export const Success = ({ postPage }) => { + return postPage.posts.map((post) => ) +} +``` + +Now we need a way to pass a value for the `page` parameter to the query. To do that we'll take advantage of a little RedwoodJS magic. Remember from the tutorial how you made the post id part of the route path `()` and that id was then sent as a prop to the BlogPostPage component? We'll do something similar here for the page number, but instead of making it a part of the url path, we'll make it a url query string. These, too, are magically passed as a prop to the relevant page component. And you don't even have to update the route to make it work! Let's update `HomePage.js` to handle the prop. + +```jsx title="web/src/pages/HomePage/HomePage.js" +const HomePage = ({ page = 1 }) => { + return ( + + + + ) +} +``` + +So now if someone navigates to https://awesomeredwoodjsblog.com?page=2 (and the blog was actually hosted on awesomeredwoodjsblog.com), then `HomePage` would have its `page` prop set to `"2"`, and we then pass that value along to `BlogPostsCell`. If no `?page=` query parameter is provided `page` will default to `1` + +Going back to `BlogPostsCell` there is one me thing to add before the query parameter work. + +```jsx title="web/src/components/BlogPostsCell/BlogPostsCell.js" +export const beforeQuery = ({ page }) => { + page = page ? parseInt(page, 10) : 1 + + return { variables: { page } } +} +``` + +The query parameter is passed to the component as a string, so we need to parse it into a number. + +If you run the project with `yarn rw dev` on the default port 8910 you can now go to http://localhost:8910 and you should only see the first five posts. Change the URL to http://localhost:8910?page=2 and you should see the next five posts (if you have that many, if you only have six posts total you should now see just one post). + +The final thing to add is a page selector, or pagination component, to the end of the list of posts to be able to click and jump between the different pages. 
+
+Generate a new component with `yarn rw g component Pagination`
+
+```jsx title="web/src/components/Pagination/Pagination.js"
+import { Link, routes } from '@redwoodjs/router'
+
+const POSTS_PER_PAGE = 5
+
+const Pagination = ({ count }) => {
+  const items = []
+
+  for (let i = 0; i < Math.ceil(count / POSTS_PER_PAGE); i++) {
+    items.push(
+      <li key={i}>
+        <Link to={routes.home({ page: i + 1 })}>{i + 1}</Link>
+      </li>
+    )
+  }
+
+  return (
+    <>
+      <h2>Pagination</h2>
+      <ul>{items}</ul>
+    </>
+  )
+}
+
+export default Pagination
+```
+
+Keeping with the theme of the official RedwoodJS tutorial, we're not adding any CSS, but if you wanted the pagination to look a little nicer it'd be easy to remove the bullets from that list and make it horizontal instead of vertical.
+
+Finally, let's add this new component to the end of `BlogPostsCell`. Don't forget to `import` it at the top as well.
+
+```jsx title="web/src/components/BlogPostsCell/BlogPostsCell.js"
+import Pagination from 'src/components/Pagination'
+
+// ...
+
+export const Success = ({ postPage }) => {
+  return (
+    <>
+      {postPage.posts.map((post) => (
+        <BlogPost post={post} key={post.id} />
+      ))}
+
+      <Pagination count={postPage.count} />
+    </>
+  )
+}
+```
+
+And there you have it! You have now added pagination to your redwood blog. One technical limitation to the current implementation is that it doesn't handle too many pages very gracefully. Just imagine what that list of pages would look like if you had 100 pages! It's left as an exercise to the reader to build a more fully featured Pagination component.
+
+Most of the code in this tutorial was copy/pasted from the ["Hammer Blog" RedwoodJS example](https://github.com/redwoodjs/example-blog).
+
+If you want to learn more about [pagination with Prisma](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/pagination) and [pagination with Apollo](https://www.apollographql.com/docs/react/data/pagination/), they both have excellent docs on the topic.
diff --git a/docs/versioned_docs/version-7.0/how-to/role-based-access-control.md b/docs/versioned_docs/version-7.0/how-to/role-based-access-control.md
new file mode 100644
index 000000000000..da138d62d71f
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/how-to/role-based-access-control.md
@@ -0,0 +1,626 @@
+---
+slug: role-based-access-control-rbac
+---
+
+# Role-based Access Control (RBAC)
+
+Role-based access control (RBAC) in RedwoodJS aims to be a simple, manageable approach to access management. It adds control over who can access routes, see features, or invoke services or functions to the existing `useAuth()` hook on the web side and `requireAuth()` helper on the api side.
+
+A **role** is a collection of permissions applied to a set of users based on the part they play in an organization or setting. Using roles makes it easier to add, remove, and adjust these permissions as your user base increases in scale and functionality increases in complexity.
+
+This how to examines how RBAC is implemented in RedwoodJS and how to protect areas of your app's sides -- web, api, or custom.
+
+### Quick Links
+
+- Authentication vs Authorization
+- House and Blog Role-access Examples
+- Identity as a Service
+- How To Code Examples
+- Additional Resources
+
+## Authentication vs Authorization
+
+How is Authorization different from Authentication?
+
+- **Authentication** is the act of validating that users are who they claim to be.
+- **Authorization** is the process of giving the user permission to access a specific resource or function.
+
+In even simpler terms, authentication is the _process_ of verifying oneself, while authorization is the _process_ of verifying what you have access to.
+
+### House and Blog Role-access Examples
+
+When thinking about security, it helps to think in terms of familiar examples.
+
+Let's consider one from the physical world -- access to the various rooms of a 🏠 house -- and compare it to a digital example of a Blog.
+
+#### RBAC Example: House
+
+Consider a 🏠 while you are away on vacation.
+ +You are the **_owner_** and have given out 🔑 keys to your **neighbor** and a **plumber** that unlock the 🏠 🚪 door. + +You've assigned them passcodes to turn off the 🚨 alarm that identifies them as either a neighbor or plumber. + +Your neighbor can enter the kitchen to get food to feed your 😸 and the your office to water your 🌵 and also use the 🚽. + +The plumber can access the basement to get at the pipes, use the 🚽, access the laundry or 🍴 kitchen to fix the sink, but not your office. + +Neither of them should be allowed into your 🛏 bedroom. + +The owner knows who they claim to be and has given them keys. + +The passcodes inform what access they have because it says if they are a neighbor or plumber. + +If your 🏠 could enforce RBAC, it needs to know the rules. + +#### Role Matrix for House RBAC + +| Role | Kitchen | Basement | Office | Bathroom | Laundry | Bedroom | +| -------- | :-----: | :------: | :----: | :------: | :-----: | :-----: | +| Neighbor | ✅ | | ✅ | ✅ | | | +| Plumber | ✅ | ✅ | | ✅ | ✅ | | +| Owner | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | + +#### RBAC Example: Blog + +In our Blog example anyone can view Posts (authenticated or not). They are _public_. + +- Authors can write new Posts. +- Editors can update them. +- Publishers can write, review, edit and delete Posts. +- And admins can do it all (and more). + +#### Role Matrix for Blog RBAC + +| Role | View | New | Edit | Delete | Manage Users | +| --------- | :---: | :---: | :---: | :----: | :----------: | +| Author | ✅ | ✅ | | | | +| Editor | ✅ | | ✅ | | | +| Publisher | ✅ | ✅ | ✅ | ✅ | | +| Admin | ✅ | ✅ | ✅ | ✅ | ✅ | + +## Auth and RBAC Checklist + +In order to integrate RBAC in a RedwoodJS app, you will have to: + +- Implement an Identity as a Service/Authentication Provider +- Define and Assign Roles +- Set Roles to Current User +- Enforce Access +- Secure Web and Api sides + +Helps to be familiar with [Blog Tutorial](../tutorial/foreword.md) as well as pages, cells, services, authentication, and routes. + +## Identity as a Service + +> "Doing authentication correctly is as hard, error-prone, and risky as rolling your own encryption." + +Developers no longer need to be responsible for developing their own identity service. The identity service manages authentication and the complexity associated. + +RedwoodJS generates Authentication Providers for several common Identity Services. + +Some offer RBAC support natively together with a UI to manage users and role assignment. + +- Netlify Identity +- Auth0 + +In other cases, you can still use an Identity Service such as: + +- Magic.link +- Custom + +However, in these cases you must provide the `currentUser.roles` information directly, such as from a User to Role database table or other source. + +### Netlify Identity Access Token (JWT) & App Metadata + +The following is a brief example of a **decoded** JSON Web Token (JWT) similar to that issued by Netlify Identity. + +There are the following standard claims: + +- `exp`: When the token expires. +- `sub`: The token's subject, in this case the user identifier. + +Other common claims are `iss` for issuer and `aud` for audience (ie, the recipient for which the JWT is intended). + +Please see [Introduction to JSON Web Tokens](https://jwt.io/introduction/) for a complete discussion. 
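+
+If you want to peek at the claims in a token from your own app, one quick way -- inspection only, this does not verify the signature -- is to base64-decode the token's payload segment. Here's a minimal Node sketch (the `decodeJwtPayload` helper name is just for illustration):
+
+```javascript
+// Decode (but do NOT verify) a JWT so you can inspect its claims locally.
+const decodeJwtPayload = (token) => {
+  const [, payload] = token.split('.')
+  return JSON.parse(Buffer.from(payload, 'base64url').toString('utf8'))
+}
+
+// Example usage with an access token copied from your app:
+// const claims = decodeJwtPayload(accessToken)
+// claims.exp                 -> expiry (seconds since the epoch)
+// claims.sub                 -> the user identifier
+// claims.app_metadata?.roles -> roles, if your provider sets them
+```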
+ +This decoded token also includes: + +- `app_metadata`: Stores information (such as, support plan subscriptions, security roles, or access control groups) that can impact a user's core functionality, such as how an application functions or what the user can access. Data stored in app_metadata cannot be edited by users +- `user_metadata`: Stores user attributes such as preferences that do not impact a user's core functionality. Logged in users can edit their data stored in user_metadata typically by making an api call the Identity service user profile endpoint with their access_token to identify themselves. + +Roles may be stored within `app_metadata` or sometimes within `authorization` under `app_metadata`. + +```json +{ + "exp": 1598628532, + "sub": "1d271db5-f0cg-21f4-8b43-a01ddd3be294", + "email": "example+author@example.com", + "app_metadata": { + "roles": ["author"] + }, + "user_metadata": { + "full_name": "Arthur Author", + } +} +``` + +## How To Code Examples + +### Set Roles to Current User + +Roles may be stored within `app_metadata` or sometimes within `authorization` under `app_metadata`. + +The `parseJWT` helper will consider both locations to extract roles on the decoded JWT. + +```javascript title="api/lib/auth.js" +import { parseJWT } from '@redwoodjs/api' + +export const getCurrentUser = async (decoded) => { + return context.currentUser || { ...decoded, roles: parseJWT({ decoded }).roles } +} +``` + +#### Roles from a Database + +If your AuthProvider does not set the role information in the token, you can query roles from a database table. + +Consider the following schema where a `User` has many `UserRoles`. + +```javascript +model User { + id Int @id @default(autoincrement()) + uuid String @unique + createdAt DateTime @default(now()) + updatedAt DateTime @default(now()) + userRoles UserRole[] +} + +model UserRole { + id Int @id @default(autoincrement()) + createdAt DateTime @default(now()) + updatedAt DateTime @default(now()) + name String + user User? @relation(fields: [userId], references: [id]) + userId Int? + + @@unique([name, userId]) +} +``` + +You can have seeded the `User` and `UserRole` tables with a new User that has a `uuid` from your identity service and also assigned that user a role of `editor`: + +```javascript +const uuid = '1683d760-5b4d-2ced-a078-23fdfebe2e19' + +const newUser = await db.user.create({ + data: { uuid }, +}) + +const userRole = await db.userRole.create({ + data: { + name: 'editor', + user: { + connect: { uuid }, + }, + }, +}) +``` + +Given that your decoded JWT `sub` claim will contain the `uuid`, you can fetch the roles by querying the `UserRoles` table and join in on the `User` via its `uuid`. + +Once you have the `UserRole`s, then you can set an array of their `name`s on the `currentUser`. + +```javascript title="api/lib/auth.js" +export const getCurrentUser = async (decoded) => { + const userRoles = await db.userRole.findMany({ + where: { user: { uuid: decoded.sub } }, + select: { name: true }, + }) + + const roles = userRoles.map((role) => { + return role.name + }) + + return context.currentUser || { roles } +} +``` + +### Web-side RBAC + +- useAuth() hook +- hasRole also checks if authenticated. 
+ +* Routes +* NavLinks in a Layout +* Cells/Components +* Markup in Page + +#### How to Protect a Route + +To protect a `PrivateSet` route for access by a single role: + +```jsx +import { Router, Route, PrivateSet } from '@redwoodjs/router' + +const Routes = () => { + return ( + + + + + + ) +} +``` + +To protect a `PrivateSet` route for access by a multiple roles: + +```jsx +import { Router, Route, PrivateSet } from '@redwoodjs/router' + +const Routes = () => { + return ( + + + + + + ) +} +``` + +> Note: If you are using `Set` you can use its `private` attribute instead of the `` component. + +If the currentUser is not assigned the role, they will be redirected to the page specified in the `unauthenticated` property. Therefore, you can define a specific page to be seen when attempting to access the protected route and denied access such as a "forbidden" page: + +```jsx +import { Router, Route, PrivateSet } from '@redwoodjs/router' + +const Routes = () => { + return ( + + + + + + + + + + ) +} +``` + +#### How to Protect a NavLink in a Layout + +A `NavLink` is a specialized `Link` used for navigation or menu links that is styled differently when the current route is active. + +To protect the `NavLink` for access by a single role: + +```jsx +import { NavLink, Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' + +const SidebarLayout = ({ children }) => { + const { hasRole } = useAuth() + + return ( + ... + {hasRole('admin') && ( + + Manage Users + + ... + )} + ) +} +``` + +To protect the `NavLink` for access by multiple roles: + +```jsx +import { NavLink, Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' + +const SidebarLayout = ({ children }) => { + const { hasRole } = useAuth() + + return ( + ... + {hasRole(['admin', 'author', 'editor', 'publisher']) && ( + + Manage Posts + + ... + )} + ) +} +``` + +Note that `hasRole()` also checks if the currentUser is authenticated. + +#### How to Protect a Component + +To protect content in a `Component` for access by a single role: + +```jsx +import { useAuth } from '@redwoodjs/auth' + +const Post = ({ post }) => { + const { hasRole } = useAuth() + + return ( + + ) +} +``` + +To protect content in a `Component` for access by multiple roles: + +```jsx +import { useAuth } from '@redwoodjs/auth' + +const Post = ({ post }) => { + const { hasRole } = useAuth() + + return ( + + ) +} +``` + +Note that `hasRole()` also checks if the currentUser is authenticated. + +#### How to Protect Markup in a Page + +To protect markup in a `Page` for access by a single role: + +```jsx +import { useAuth } from "@redwoodjs/auth"; +import SidebarLayout from "src/layouts/SidebarLayout"; + +const SettingsPage = () => { + const { isAuthenticated, userMetadata, hasRole } = useAuth(); + + return ( + {isAuthenticated && ( +
    + {hasRole("admin") && ( + + Edit on Netlify + + )} +
    + )} + )} +} +``` + +To protect markup in a `Page` for access by multiple roles: + +```jsx +import { useAuth } from "@redwoodjs/auth"; +import SidebarLayout from "src/layouts/SidebarLayout"; + +const SettingsPage = () => { + const { isAuthenticated, userMetadata, hasRole } = useAuth(); + + return ( + {isAuthenticated && ( +
    + {hasRole(["admin", "userManager"]) && ( + + Edit on Netlify + + )} +
    + )} + )} +} +``` + +Note that `hasRole()` also checks if the currentUser is authenticated. + +### Api-side RBAC + +- Example `requireAuth()` +- Services +- Functions +- Default Roles using [Netlify Identity Triggers](https://docs.netlify.com/functions/trigger-on-events/) + +#### Example `requireAuth()` + +Use `requireAuth()` in your services to check that a user is logged in, whether or not they are assigned a role, and optionally raise an error if they're not. + +It checks for a single role: + +```javascript +requireAuth({ roles: 'editor' }) +``` + +or multiple roles: + +```javascript +requireAuth({ roles: ['admin', 'author', 'publisher'] }) +``` + +This function should be located in `api/src/lib/auth.js` for your RedwoodJS app (ie, where your `getCurrentUser()` is located). + +```javascript +export const requireAuth = ({ roles } = {}) => { + if (!isAuthenticated()) { + throw new AuthenticationError("You don't have permission to do that.") + } + + if (roles && !hasRole(roles)) { + throw new ForbiddenError("You don't have access to do that.") + } +} +``` + +#### How to Protect a Service + +```javascript +import { db } from 'src/lib/db' +import { requireAuth } from 'src/lib/auth' + +const CREATE_POST_ROLES = ['admin', 'author', 'publisher'] + +export const createPost = ({ input }) => { + requireAuth({ role: CREATE_POST_ROLES }) + + return db.post.create({ + data: { + ...input, + authorId: context.currentUser.sub, + publisherId: context.currentUser.sub, + }, + }) +} +``` + +#### How to Protect a Function + +Since `requireAuth()` raises an exception, catch and return a `HTTP 401 Unauthorized` or `HTTP 403 Forbidden` client error status response code. + +```javascript +import { requireAuth } from 'src/lib/auth' +import { AuthenticationError, ForbiddenError } from '@redwoodjs/api' + +export const handler = async (event, context) => { + try { + requireAuth({ roles: 'admin' }) + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 200, + body: JSON.stringify({ + data: 'Permitted', + }), + } + } catch (e) { + if (e instanceof AuthenticationError) { + return { + statusCode: 401, + } + } else if (e instanceof ForbiddenError) { + return { + statusCode: 403, + } + } else { + return { + statusCode: 400, + } + } + } +} +``` + +#### How to Default Roles on Signup using Netlify Identity Triggers + +You can trigger serverless function calls when certain Identity events happen, like when a user signs up. + +Netlify Identity currently supports the following events: + +- `identity-validate`: Triggered when an Identity user tries to sign up via Identity. +- `identity-signup`: Triggered when an Identity user signs up via Netlify Identity. (Note: this fires for only email+password signups, not for signups via external providers e.g. Google/GitHub) +- `identity-login`: Triggered when an Identity user logs in via Netlify Identity + +To set a serverless function to trigger on one of these events, match the name of the function file to the name of the event. For example, to trigger a serverless function on identity-signup events, name the function file `identity-signup.js`. + +If you return a status other than 200 or 204 from one of these event functions, the signup or login will be blocked. + +If your serverless function returns a 200, you can also return a JSON object with new user_metadata or app_metadata for the Identity user. 
+ +```javascript title="api/src/functions/identity-signup.js" +export const handler = async (req, _context) => { + const body = JSON.parse(req.body) + + const eventType = body.event + const user = body.user + const email = user.email + + let roles = [] + + if (eventType === 'signup') { + if (email.includes('+author')) { + roles.push('author') + } + + if (email.includes('+editor')) { + roles.push('editor') + } + + if (email.includes('+publisher')) { + roles.push('publisher') + } + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 200, + body: JSON.stringify({ app_metadata: { roles: roles } }), + } + } else { + return { + statusCode: 200, + } + } +} +``` + +#### How to invoke serverless functions while in dev + +So long as `yarn rw dev` is running, `netlify-cli` can be used to invoke your function. Steps are: + +```bash +# Install the cli +yarn add netlify-cli -g + +# Rebuild api after any changes to /functions +yarn rw build api + +# Invoke your function with the CLI, pointing it to the rw dev port +netlify functions:invoke --port 8910 +``` + +`` should be replaced by `identity-validate`, `identity-signup`, `identity-login` or your own function. + +Note that the netlify-cli does not generate fake user data for each invocation of an identity function. It always provides the same `Test Person` data. + +## Additional Resources + +- [RBAC Example & Demo Site](https://redwoodblog-with-identity.netlify.app/) +- [RBAC Example & Demo Site GitHub Repo](https://github.com/dthyresson/redwoodblog-rbac) +- [Netlify Identity](https://docs.netlify.com/visitor-access/identity/) +- [Netlify Identity Triggers](https://docs.netlify.com/functions/trigger-on-events/) +- [JSON Web Tokens (JWT)](https://jwt.io/) +- [5 Massive Benefits Of Identity As A Service](https://auth0.com/blog/5-massive-benefits-of-identity-as-a-service-for-developers/) diff --git a/docs/versioned_docs/version-7.0/how-to/self-hosting-redwood.md b/docs/versioned_docs/version-7.0/how-to/self-hosting-redwood.md new file mode 100644 index 000000000000..870b61ef84a9 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/self-hosting-redwood.md @@ -0,0 +1,164 @@ +# Self-hosting Redwood (Serverful) +:::warning + +This doc has been deprecated in favor of the [Baremetal](../deploy/baremetal.md) docs. + +::: + +Do you prefer hosting Redwood on your own server, the traditional serverful way, instead of all this serverless magic? Well, you can! In this recipe we configure a Redwood app with PM2 and Nginx on a Linux server. + +> A code example can be found at https://github.com/njjkgeerts/redwood-pm2, and can be viewed live at http://redwood-pm2.nickgeerts.com. + +## Requirements + +You should have some basic knowledge of the following tools: + +- [PM2](https://pm2.keymetrics.io/docs/usage/pm2-doc-single-page/) +- [Nginx](https://nginx.org/en/docs/) +- Linux +- [Postgres](https://www.postgresql.org/docs/) + +## Configuration + +To self-host, you'll have to do a bit of configuration both to your Redwood app and your Linux server. + +### Adding Dependencies + +First add PM2 as a dev dependency to your project root: + +```termninal +yarn add -D pm2 +``` + +Then create a PM2 ecosystem configuration file. 
For clarity, it's recommended to rename `ecosystem.config.js` to something like `pm2.config.js`: + +```bash +yarn pm2 init +mv ecosystem.config.js pm2.config.js +``` + +Last but not least, change the API endpoint in `redwood.toml`: + +```diff +- apiUrl = "/.redwood/functions" ++ apiUrl = "/api" +``` + +Optionally, add some scripts to your top-level `package.json`: + +```json +"scripts": { + "deploy:setup": "pm2 deploy pm2.config.js production setup", + "deploy": "pm2 deploy pm2.config.js production deploy" +} +``` + +We'll refer to these later, so even if you don't add them to your project, keep them in mind. + +### Linux server + +Your Linux server should have a user for deployment, configured with an SSH key providing access to your production environment. In this example, the user is named `deploy`. + +### Nginx + +Typically, you keep your Nginx configuration file at `/etc/nginx/sites-available/redwood-pm2` and symlink it to `/etc/nginx/sites-enabled/redwood-pm2`. It should look something like this: + +```nginx {10} +server { + server_name redwood-pm2.example.com; + listen 80; + + location / { + root /home/deploy/redwood-pm2/current/web/dist; + try_files $uri /index.html; + } + + location /api/ { + proxy_pass http://localhost:8911/; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_cache_bypass $http_upgrade; + } +} +``` + +Please note that the trailing slash in `proxy_pass` is essential to correctly map the API functions. + +### PM2 + +Let's configure PM2 with the `pm2.config.js` file we made earlier. The most important variables are at the top. Note that the port is only used locally on the server and should match the port in the Nginx config: + +```javascript +const name = 'redwood-pm2' // Name to use in PM2 +const repo = 'git@github.com:njjkgeerts/redwood-pm2.git' // Link to your repo +const user = 'deploy' // Server user +const path = `/home/${user}/${name}` // Path on the server to deploy to +const host = 'example.com' // Server hostname +const port = 8911 // Port to use locally on the server +const build = `yarn install && yarn rw build && yarn rw prisma migrate deploy` + +module.exports = { + apps: [ + { + name, + node_args: '-r dotenv/config', + cwd: `${path}/current/`, + script: 'yarn rw serve api', + args: `--port ${port}`, + env: { + NODE_ENV: 'development', + }, + env_production: { + NODE_ENV: 'production', + }, + }, + ], + + deploy: { + production: { + user, + host, + ref: 'origin/master', + repo, + path, + ssh_options: 'ForwardAgent=yes', + 'post-deploy': `${build} && pm2 reload pm2.config.js --env production && pm2 save`, + }, + }, +} +``` + +If you need to seed your production database during your first deployment, `yarn redwood prisma migrate dev` will do that for you. + +> **Caveat:** the API seems to only work in fork mode in PM2, not [cluster mode](https://pm2.keymetrics.io/docs/usage/cluster-mode/). + +## Deploying + +First, we need to create the PM2 directories: + +```bash +yarn install +yarn deploy:setup +``` + +Your server directories are now set, but we haven't configured the `.env` settings yet. SSH into your server and create an `.env` file in the `current` subdirectory of the deploy directory: + +```bash +vim /home/deploy/redwood-pm2/current/.env +``` + +For example, add a `DATABASE_URL` variable: + +```env +DATABASE_URL=postgres://postgres:postgres@localhost:5432/redwood-pm2 +``` + +Now we can deploy the app! 
Just run the following; it should update the code, take care of database migrations, and restart the app in PM2: + +```bash +yarn deploy +``` + +Enjoy! 😁 diff --git a/docs/versioned_docs/version-7.0/how-to/sending-emails.md b/docs/versioned_docs/version-7.0/how-to/sending-emails.md new file mode 100644 index 000000000000..afeb4580fca5 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/sending-emails.md @@ -0,0 +1,389 @@ +# Sending Emails + +Something a lot of applications will eventually have to do is send emails. To demonstrate how you can do that with RedwoodJS we're going to build a simple list of users and their email addresses, and allow you to trigger an email to them. We'll also include some auditing features, so you get a history of emails you sent to your users. The audit logs will be implemented by using one service from within another service — a powerful RedwoodJS feature. + +The emails will be sent using the npm package [nodemailer](https://www.npmjs.com/package/nodemailer) together with [SendInBlue](https://sendinblue.com). + +## Setup + +The first thing to do is to create a new RedwoodJS project. + +```zsh +yarn create redwood-app --typescript email +``` + +When that's done, go into the `email` directory and install the `nodemailer` package. + +```zsh +yarn workspace api add nodemailer +``` + +### DB design + +Now, fire up your editor of choice and find the `schema.prisma` file and remove the example model. The app we're building is going to have two models. One for our users and one for the audit logs. Paste the following two models in your schema file. + +```graphql +model User { + id String @id @default(uuid()) + createdAt DateTime @default(now()) + updatedAt DateTime @default(now()) @updatedAt + email String @unique + name String? + audits Audit[] +} + +model Audit { + id String @id @default(uuid()) + createdAt DateTime @default(now()) + updatedAt DateTime @default(now()) @updatedAt + userId String + user User @relation(fields: [userId], references: [id]) + log String +} +``` + +Technically all we really need in the User model is the email address and the Audit relation field. But personally I have never regretted having an id, and the two timestamps in my models. But I _have_ regretted _not_ having them, having to go back to add them later. So now I always include them from the start. And I also added a `name` field to the user, to make this example at least a little bit realistic 😁. A proper user model would most likely have way more fields. The audit model is also overly simplistic. Especially the single `log` string. A proper audit trail needs way more info. But for demo purposes it's good enough. Final thing I wanted to mention was the relation. We set up a one-to-many relation from the user to the audit logs so that we can easily find all logs belonging to a user by simply following the relation. + +Now we can go ahead and migrate our database and create the SDLs and services needed to interact with the Prisma model using GraphQL. + +```zsh +yarn rw prisma migrate dev --name email +``` + +### Scaffold + +One of Redwood's stand-out features is the scaffolds. We'll be using scaffolds here to quickly get a nice visual list of the users in our database to work with. + +```zsh +yarn rw g scaffold User +``` + +Let's do it for Audit as well + +```zsh +yarn rw g scaffold Audit +``` + +Now let's run the Redwood dev server to see what we've created so far. 
+ +```zsh +yarn rw dev +``` + +Your web browser should open up and show the default Redwood app home page with a list of links to all your pages. Click on the `/users` link and then go ahead and create a few users. Since we're going to send emails to these users, use emails you can actually check. So you can make sure it works. A service I like to use for generating random users with real email addresses is https://www.fakenamegenerator.com. Just click the link on that page to activate the email address and you'll be able to send emails from your app, and see them arrive. + +So if you create three users you should see something like this + +![Screenshot showing list scaffolded list of users, with three example users](https://user-images.githubusercontent.com/30793/150651281-051d49d0-659c-481c-bed3-17a629d290e4.png) + +Clicking to show the details on one of the users you should see a page similar to what I have below here. To that page I've also added a button to send an email to the user. I'll show you how next! + +![Detailed view of single user, with button to send email](https://user-images.githubusercontent.com/30793/150651287-258e923e-9446-4bde-8e9c-c81275b8590c.png) + +### Button to send email + +To add our button, and the actions connected to it, we need to add a fair bit of code to the User component. I've put the full code below to make sure you don't miss anything. + +```tsx title="src/components/User/User.tsx" +import { useMutation } from '@redwoodjs/web' +import { toast } from '@redwoodjs/web/toast' +import { Link, routes, navigate } from '@redwoodjs/router' + +const DELETE_USER_MUTATION = gql` + mutation DeleteUserMutation($id: String!) { + deleteUser(id: $id) { + id + } + } +` + +const EMAIL_USER_MUTATION = gql` + mutation EmailUserMutation($id: String!) { + emailUser(id: $id) { + id + } + } +` + +const timeTag = (datetime) => { + return ( + + ) +} + +const User = ({ user }) => { + const [deleteUser] = useMutation(DELETE_USER_MUTATION, { + onCompleted: () => { + toast.success('User deleted') + navigate(routes.users()) + }, + onError: (error) => { + toast.error(error.message) + }, + }) + + const [emailUser] = useMutation(EMAIL_USER_MUTATION, { + onCompleted: () => { + toast.success('Email sent') + }, + onError: (error) => { + toast.error(error.message) + }, + }) + + const onDeleteClick = (id) => { + if (confirm('Are you sure you want to delete user ' + id + '?')) { + deleteUser({ variables: { id } }) + } + } + + const onEmailClick = (user) => { + if (confirm(`Are you sure you want to send an email to ${user.name}?`)) { + emailUser({ variables: { id: user.id } }) + } + } + + return ( + <> +
+      <div className="rw-segment">
+        <header className="rw-segment-header">
+          <h2 className="rw-heading rw-heading-secondary">
+            User {user.id} Detail
+          </h2>
+        </header>
+        <table className="rw-table">
+          <tbody>
+            <tr>
+              <th>Id</th>
+              <td>{user.id}</td>
+            </tr>
+            <tr>
+              <th>Created at</th>
+              <td>{timeTag(user.createdAt)}</td>
+            </tr>
+            <tr>
+              <th>Updated at</th>
+              <td>{timeTag(user.updatedAt)}</td>
+            </tr>
+            <tr>
+              <th>Email</th>
+              <td>{user.email}</td>
+            </tr>
+            <tr>
+              <th>Name</th>
+              <td>{user.name}</td>
+            </tr>
+          </tbody>
+        </table>
+      </div>
+      <nav className="rw-button-group">
+        <Link
+          to={routes.editUser({ id: user.id })}
+          className="rw-button rw-button-blue"
+        >
+          Edit
+        </Link>
+        <button
+          type="button"
+          className="rw-button rw-button-red"
+          onClick={() => onDeleteClick(user.id)}
+        >
+          Delete
+        </button>
+        <button
+          type="button"
+          className="rw-button rw-button-green"
+          onClick={() => onEmailClick(user)}
+        >
+          Send email
+        </button>
+      </nav>
+    </>
    + + + ) +} + +export default User +``` + +We're using a GraphQL mutation here to trigger the sending of the email. To make that mutation work we need to add it to the users SDL. + +```ts title="users.sdl.ts" +export const schema = gql` + // ... + + type Mutation { + // ... + + emailUser(id: String!): User! @requireAuth + } +` +``` + +And then in the users service we'll just create a dummy method to start with. + +```ts title="users.ts" +// ... + +import type { Prisma } from '@prisma/client' + +// ... + +export const emailUser = async ({ id }: Prisma.UserWhereUniqueInput) => { + const user = await db.user.findUnique({ + where: { id }, + }) + + console.log('Sending email to', user) + + return user +} + +// ... +``` + +Now is a good time to go get a fresh cup of coffee, or other beverage of choice. When you come back we'll create an account at [SendInBlue](https://www.sendinblue.com) and use the credentials from there to send an email. + +## SendInBlue + +To actually send an email you need a mail server that you can talk to using SMTP. `nodemailer` has a really [simple example](https://nodemailer.com/about/#example) on their webpage that uses Ethereal. But that's only for test messages. The emails will never actually be delivered beyond Ethereal. Another option is to use your own GMail address (if you have one). But to get that working reliably you need to set up OAuth2, which isn't very straight forward. So your best bet here is actually to use a dedicated Cloud/SaaS solution. A lot of them have a free tier that lets you send enough emails for a small production app. We'll be using SendInBlue that offers 300 free emails per day. + +So go ahead and create an account with SendInBlue. They'll ask for an address and a phone number. They need it to prevent users from creating accounts to send spam emails from. When your account is created and set up you need to click on the menu in the upper right with your company name and select the "SMTP & API" option. + +![SendInBlue top right menu](https://user-images.githubusercontent.com/30793/150651291-21f5a7bd-6148-4cfe-97a1-2e9c3cab2d81.png) + +Then click on "SMTP" + +![SendInBlue SMTP tab-bar option](https://user-images.githubusercontent.com/30793/150651295-929e671a-da38-46ab-937c-a976b23a0fa0.png) + +Finally you need to generate a new SMTP key. Name it whatever you want, doesn't matter. You should get a dialog that looks like the screenshot below. Copy your key. + +![SendInBlue SMTP key dialog](https://user-images.githubusercontent.com/30793/150651301-523750b3-7732-4a15-bc0e-746811a4bb20.png) + +Now switch to your code editor and open the `.env` file. At the bottom, on a new row, create a new environment variable called SEND_IN_BLUE_KEY. It should look like this, but with your unique key. + +``` +SEND_IN_BLUE_KEY=xsmtpsib-7fa6eb37c244429933ea870185063c493ba1c820f826c5f620877dd815392602-rZgB6GUV1CF2NLAK +``` + +That's it for SendInBlue. It's set up, and you have the key you need to send emails. If you have your dev server still running, you need to restart it for the new environment variable to be picked up. + +## Sending an email + +Now let's write the function that'll fire off the email. On the api side, in the `lib` folder, create a new file named `email.ts`. 
Paste this code in the file + +```ts title="email.ts" +import * as nodemailer from 'nodemailer' + +interface Options { + to: string | string[] + subject: string + text: string + html: string +} + +export async function sendEmail({ to, subject, text, html }: Options) { + console.log('Sending email to:', to) + + // create reusable transporter object using SendInBlue for SMTP + const transporter = nodemailer.createTransport({ + host: 'smtp-relay.sendinblue.com', + port: 587, + secure: false, // true for 465, false for other ports + auth: { + user: 'your@email.com', + pass: process.env.SEND_IN_BLUE_KEY, + }, + }) + + // send mail with defined transport object + const info = await transporter.sendMail({ + from: '"Your Name" ', + to: Array.isArray(to) ? to : [to], // list of receivers + subject, // Subject line + text, // plain text body + html, // html body + }) + + return info +} +``` + +In the code above you should replace "your@email.com" in two places with the email you used when signing up for SendInBlue. You can also change the name used for "From:". Note: Remember to use the email address as it is shown in the SendInBlue website, it is case sensitive. + +Now let's go back to the users service and add the missing pieces there. At the top, after the db import, add the `sendEmail` import + +```ts title="users.ts" +// ... + +import { sendEmail } from 'src/lib/email' + +// ... +``` + +Then paste this function somewhere in the file + +```ts title="users.ts" +// ... + +function sendTestEmail(emailAddress: string) { + const subject = 'Test Email' + const text = + 'This is a manually triggered test email.\n\n' + + 'It was sent from a RedwoodJS application.' + const html = + 'This is a manually triggered test email.

    ' + + 'It was sent from a RedwoodJS application.' + return sendEmail({ to: emailAddress, subject, text, html }) +} + +// ... +``` + +Finally, replace the `console.log` we left earlier with this code + +```ts title="users.ts" +// ... + +await sendTestEmail(user.email) + +// ... +``` + +You can now test your app's new email sending capabilities by clicking on the email button you added previously. You should see a "Sending email to: horacebcarrier@teleworm.us" message in your terminal, and a few minutes later it should pop up in the users email inbox. (If you're using the email addresses generated by fakenamegenerator you need to be patient, it does take a while before you can see new emails arriving.) + +## Using one service from another service + +The final thing to add is the auditing. When the users service sends an email we want to call the audits service to add a new audit log entry. Redwood makes this really easy. All you have to do is import the service and you can use all the functions it exports! + +One thing I wanted to note here is that this might bypass security measures you have in place. When you call a service from the web side of your project you use GraphQL and the service is then protected by the `@requireAuth` directive. If you have a service that's open for everyone (i.e. that uses `@skipAuth`) and that service imports and uses another service it will be allowed to call any function in there, no matter what directives they use on the graphql side of things. In our case the `emailUser` mutation is using `@requireAuth`, so we're not affected by this. + +With that little PSA out of the way, let's make this auditing stuff happen! + +```ts title="users.ts" +// ... + +import { createAudit } from '../audits/audits' + +// ... + +export const emailUser = async ({ id }: Prisma.UserWhereUniqueInput) => { + // ... + + await sendTestEmail(user.email) + await createAudit({ + input: { userId: id, log: 'Admin sent test email to user' }, + }) + + // ... +} + +// ... +``` + +That's it! We just import the audits service and call the exported `createAudit` function. The syntax for the argument object that is passed to `createAudit` might not be super obvious, but the TypeScript types help a lot with how it should be structured! What we're doing is we're connecting this new audit log with an existing user, and setting the log message. The audit entries will automatically get a timestamp (and a generated id). + +To view the audit logs you can use the scaffolded pages we created earlier. Just navigate to http://localhost:8910/audits and you should see them there. + +Thanks for reading this! If you liked it, or have any questions, don't hesitate to reach out on [our forums](https://community.redwoodjs.com) or in our [Discord chat](https://discord.gg/jjSYEQd). diff --git a/docs/versioned_docs/version-7.0/how-to/supabase-auth.md b/docs/versioned_docs/version-7.0/how-to/supabase-auth.md new file mode 100644 index 000000000000..f5425b735daa --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/supabase-auth.md @@ -0,0 +1,685 @@ +# Supabase Auth + +Let's call this how to a port of the [Redwood GoTrue Auth how to](gotrue-auth.md) to [Supabase](https://supabase.io/). +I won't get original style points because I copy-pasted (and updated, for good measure) the original. +Why? Because Supabase auth is based on [Netlify GoTrue](https://github.com/netlify/gotrue), an API service for handling user registration and authentication. The Supabase folks build on solid open-source foundations. 
+ +Once I connected these dots, the Redwood GoTrue Auth how to became a handy resource as I climbed the auth learning curve (and I started from sea level). Hopefully this Supabase-specific edition will help you climb your own too. + +## Time to Cook + +In this recipe, we'll: + +- Configure a Redwood app with Supabase auth +- Create a Sign Up form, a Sign In form, and a Sign Out button +- Add auth links that display the correct buttons based on our auth state + +But first, some housekeeping... + +## Prerequisites + +Before getting started, there are a few steps you should complete: + +- [Create a Redwood app](../tutorial/chapter1/installation.md) +- [Create a Supabase account](https://www.supabase.io/) +- [Go through the Supabase React Quick Start](https://supabase.io/docs/guides/with-react) +- [Go through the Supabase Redwood Quick Start](https://supabase.io/docs/guides/with-redwoodjs) +- Fire up a dev server: `yarn redwood dev` + +### About the Supabase Quick Starts + +Why the React Quick Start before the Redwood? I found it helpful to first interact directly with the [Supabase Client](https://github.com/supabase/supabase-js). Eventually, you'll use the [Redwood Auth wrapper](../authentication.md#supabase), which provides a level of abstraction and a clean, consistent style. But I needed a couple hours of direct client experimentation to gain comfort in the Redwood one. + +So, just this once, I hereby give you permission to fire-up Create React App as you follow-along the Supabase React Quick Start. I worked through it first. Then I worked through the Supabase Redwood Quick start, observing the slight differences. This helped me understand the details that the Redwood wrapper abstracts for us. + +> **Auth Alphabet Soup** +> +> If you're like me—and I'm pretty sure I'm just human—you may find yourself spinning in jumbled auth jargon. Hang in there, you'll get your auth ducks lined up eventually. +> +> I'm proud to tell you that I now know that the Redwood Supabase auth client wraps the Supabase GoTrueJS client, which is a fork of Netlify’s GoTrueJS client (which is different from Netlify Identity). And dbAuth is a totally separate auth option. Plus, I'll keep it simple and not use RBAC at the moment. +> +> Ahhh! It took me a few weeks to figure this out. + +## Back to Redwood + +Armed with some knowledge and insight from going through the Supabase Quick Starts, let's head back to the Redwood app created as part of the prerequisites. + +Start by installing the required packages and generating boilerplate for Redwood Auth, all with this simple [CLI command](../cli-commands.md#setup-auth): + +```bash +yarn redwood setup auth supabase +``` + +By specifying `supabase` as the provider, Redwood automatically added the necessary Supabase config to our app. Let's open up `web/src/App.[js/tsx]` and inspect. 
You should see: + +```jsx {1-2,12,17} title="web/src/App.[js/tsx]" +import { AuthProvider } from '@redwoodjs/auth' +import { createClient } from '@supabase/supabase-js' + +import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' +import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' + +import FatalErrorPage from 'src/pages/FatalErrorPage' +import Routes from 'src/Routes' + +import './index.css' + +const supabaseClient = createClient(process.env.SUPABASE_URL, process.env.SUPABASE_KEY) + +const App = () => ( + + + + + + + + + +) + +export default App +``` + +Now it's time to add the Supabase URL, public API KEY, and JWT SECRET (`SUPABASE_URL`, `SUPABASE_KEY`, and `SUPABASE_JWT_SECRET`) to your `.env` file. +You can find these items in your Supabase management console, under **Settings > API**: + +![Supabase console screen shot](https://user-images.githubusercontent.com/43206213/146407575-71ad2c94-8fa6-48d2-a403-d249f75569ea.png) + +Here's a `.env` example: + +```bash +# .env (in your root project directory) + +SUPABASE_URL=https://replacewithyoursupabaseurl.supabase.co +SUPABASE_KEY=eyJhb_replace_VCJ9.eyJy_with_your_wfQ.0Abb_anon_key_teLJs +SUPABASE_JWT_SECRET=eyJh_replace_CJ9.eyJy_with_your_NTQwOTB9.MGNZN_JWT_secret_JgErqxj4 +``` + +That's (almost) all for configuration. + +## Sign Up + +Sign Up feels like an appropriate place to start building our interface. +Our first iteration won't include features like email confirmation or password recovery. +To forgo email confirmation, turn off "Enable email confirmations" on your Supabase management console, found under `Authentication > Settings`: + +![Supabase email confirmation toggle](https://user-images.githubusercontent.com/43206213/147164458-1b6723ef-d7dd-4c7c-b228-73ca4ba7b1ff.png) + +_Now_ we're done with configuration. Back to our app... + +## The Sign Up Page + +Let's generate a Sign Up page: + +```bash +yarn redwood generate page signup +``` + +This adds a Sign Up [route](../router.md) to our routes file and creates a `SignupPage` component. + +In the just-generated `SignupPage` component (`web/src/pages/SignupPage/SignupPage.[js/tsx]`), let's import some [Redwood Form components](../forms.md) and make a very basic form: + +```jsx title="web/src/pages/SignupPage/SignupPage.[js/tsx]" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' + +const SignupPage = () => { + return ( + <> +

    Sign Up

    +
    + + + Sign Up + + + ) +} + +export default SignupPage +``` + +Did I mention it was basic? If you want to add some polish, you might find both the [Redwood Form docs](../forms.md) and the [tutorial section on forms](../tutorial/chapter3/forms.md) quite useful. For our purposes, let's just focus on the functionality. + +Now that we have a form interface, we're going to want to do something when the user submits it. Let's add an `onSubmit` function to our component and pass it as a prop to our Form component: + +```jsx {4-6,11} title="web/src/pages/SignupPage/SignupPage.[js/tsx]" +// ... + +const SignupPage = () => { + const onSubmit = (data) => { + // do something here + } + + return ( + <> +

    Sign Up

    +
    + + + Sign Up + + + ) +} + +//... +``` + +The _something_ we need to do is—surprise!—sign up. To do this, we'll need a way to communicate with `` and the Supabase GoTrue-JS client we passed to it. Look no further than the [`useAuth` hook](../authentication.md#api), which lets us subscribe to our auth state and its properties. In our case, we'll be glad to now have access to `client` and, thusly, our Supabase GoTrue-JS instance and [all of its functions](https://github.com/supabase/supabase-js). + +Let's import `useAuth` and destructure `client` from it in our component: + +```jsx {2,5} title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SignupPage = () => { + const { client } = useAuth() + const onSubmit = (data) => { + // do something here + } + + return ( + <> +

    Sign Up

    +
    + + + Sign Up + + + ) +} + +export default SignupPage +``` + +And now we'll attempt to create a new user in the `onSubmit` function with [`client.auth.signUp()`](https://supabase.io/docs/reference/javascript/auth-signup) by passing the `email` and `password` values that we captured from our form: + +```jsx {8-16} title="web/src/pages/SignupPage/SignupPage.[js/tsx]" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SignupPage = () => { + const { client } = useAuth() + + const onSubmit = async (data) => { + try { + const response = await client.auth.signUp({ + email: data.email, + password: data.password + }) + console.log('response: ', response) + } catch(error) { + console.log('error: ', error) + } + } + + return ( + <> +

    Sign Up

    +
    + + + Sign Up + + + ) +} +export default SignupPage +``` + +Presently, our sign up works as is, but simply console-logging the response from `client.auth.signup()` is hardly useful behavior. + +Let's display errors to the user if there are any. To do this, we'll set up `React.useState()` to manage our error state and conditionally render the error message. We'll also want to reset the error state at the beginning of every submission with `setError(null)`: + +```jsx {6,9,16,18,26} title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SignupPage = () => { + const { client } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = async (data) => { + setError(null) + try { + const response = await client.auth.signUp({ + email: data.email, + password: data.password + }) + console.log('response: ', response) + response?.error?.message && setError(response.error.message) + } catch(error) { + setError(error.message) + } + } + + return ( + <> +

    Sign Up

    +
    + {error &&

    {error}

    } + + + Sign Up + + + ) +} +export default SignupPage +``` + +> Errors may be returned in two fashions: +> +> 1. upon promise fulfillment, within the `error` property of the object returned by the promise +> +> 2. upon promise rejection, within an error returned by the promise (you can handle this via the `catch` block) + +Now we can handle a successful submission. If we sign up without email confirmation, then successful sign up also _signs in_ the user. Once they've signed in, we'll want to redirect them back to our app. + +First, if you haven't already, [generate](../cli-commands.md#generate-page) a homepage: + +```bash +yarn redwood generate page home / +``` + +Let's import `routes` and `navigate` from [Redwood Router](../router.md#navigate) and use them to redirect to the home page upon successful sign up: + +```jsx {3,16} title="web/src/pages/SignupPage/SignupPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' +import { routes, navigate } from '@redwoodjs/router' + +const SignupPage = () => { + const { client } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = async (data) => { + setError(null) + try { + const response = await client.auth.signUp({ + email: data.email, + password: data.password + }) + response?.error?.message ? setError(response.error.message) : navigate(routes.home()) + } catch(error) { + setError(error.message) + } + } + + return ( + <> +

    Sign Up

    +
    + {error &&

    {error}

    } + + + Sign Up + + + ) +} +export default SignupPage +``` + +Hoorah! We've just added a sign up page and created a sign up form. We created a function to sign up users and we redirect users to the home page upon successful submission. Let's move on to Sign In. + +## Sign In + +Let's get right to it. Start by [generating](../cli-commands.md#generate-page) a sign in page: + +```bash +yarn redwood generate page signin +``` + +Next we'll add a basic form with `email` and `password` fields, some error reporting, and a hollow `onSubmit` function: + +```jsx title="web/src/pages/SigninPage/SigninPage.[js/tsx]" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' + +const SigninPage = () => { + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + // do sign in here + } + + return ( + <> +

    Sign In

    +
    + {error &&

    {error}

    } + + + Sign In + + + ) +} + +export default SigninPage +``` + +Then we'll need to import `useAuth` from `@redwoodjs/auth` and destructure `logIn` so that we can use it in our `onSubmit` function: + +```jsx {2,5} title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SigninPage = () => { + const { logIn } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = (data) => { + setError(null) + // do sign in here + } + + return ( + <> +

    Sign In

    +
    + {error &&

    {error}

    } + + + Sign In + + + ) +} + +export default SigninPage +``` + +Now we'll add `logIn` to our `onSubmit` function. This time we'll be passing an object to our function as we're using Redwood Auth's `logIn` function directly (as opposed to `client`). This object takes an email and password. + +```jsx {10-15} title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' + +const SigninPage = () => { + const { logIn } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = async (data) => { + setError(null) + try { + const response = await logIn({ email: data.email, password: data.password }) + // do something + } catch(error) { + setError(error.message) + } + } + + return ( + <> +

    Sign In

    +
    + {error &&

    {error}

    } + + + Sign In + + + ) +} + +export default SigninPage +``` + +Let's redirect our user back to the home page upon a successful login. + +In our `SigninPage`, import `navigate` and `routes` from [`@redwoodjs/router`](../router.md) and add them after awaiting `logIn`: + +```jsx {10-16} title="web/src/pages/SigninPage/SigninPage.js" +import { Form, TextField, PasswordField, Submit } from '@redwoodjs/forms' +import { useAuth } from '@redwoodjs/auth' +import { navigate, routes } from '@redwoodjs/router' + +const SigninPage = () => { + const { logIn } = useAuth() + const [error, setError] = React.useState(null) + + const onSubmit = async (data) => { + setError(null) + try { + const response = await logIn({ email: data.email, password: data.password }) + response?.error?.message ? setError(response.error.message) : navigate(routes.home()) + } catch(error) { + setError(error.message) + } + } + + return ( + <> +

    Sign In

    +
    + {error &&

    {error}

    } + + + Sign In + + + ) +} + +export default SigninPage +``` + +Well done! We've created a sign in page and form that successfully handles sign in. + +> The remainder of the how to is the same as the [Netlify GoTrue Auth](gotrue-auth.md) version. This highlights one of the fun benefits of the Redwood Auth wrappers: code specific to a certain auth implementation scheme can live in a few specific spots, as we walked through above. Then, general Redwood Auth functions can be used elsewhere in the app. + +## Sign Out + +Sign Out is by far the easiest to implement. All we need to do is call `useAuth`'s `logOut` method. + +Let's start by [generating a component](../cli-commands.md#generate-component) to house our Sign Out Button: + +```bash +yarn redwood generate component signoutBtn +``` + +In the `web/src/components/SignoutBtn/SignoutBtn.js` file we just generated, let's render a button and add a click handler: + +```jsx title="web/src/components/SignoutBtn/SignoutBtn.[js/tsx]" +const SignoutBtn = () => { + const onClick = () => { + // do sign out here. + } + return +} + +export default SignoutBtn +``` + +Now let's import `useAuth` from `@redwoodjs/auth`. We'll destructure its `logOut` method and invoke it in `onClick`: + +```jsx {1,4,7} title="web/src/components/SignoutBtn/SignoutBtn.[js/tsx]" +import { useAuth } from '@redwoodjs/auth' + +const SignoutBtn = () => { + const { logOut } = useAuth() + + const onClick = () => { + logOut() + } + return +} + +export default SignoutBtn +``` + +This works as is, but because the user may be in a restricted part of your app when they sign out, we should make sure to navigate them away from this page: + +```jsx {2,8-9} title="web/src/components/SignoutBtn/SignoutBtn.[js/tsx]" +import { useAuth } from '@redwoodjs/auth' +import { navigate, routes } from '@redwoodjs/router' + +const SignoutBtn = () => { + const { logOut } = useAuth() + + const onClick = async () => { + await logOut() + navigate(routes.home()) + } + + return +} + +export default SignoutBtn +``` + +And that's it for Sign Out! Err, of course, we're not rendering it anywhere in our app yet. In the next section, well add some navigation that conditionally renders the appropriate sign up, sign in, and sign out buttons based on our authentication state. + +## Auth Links + +In this section we'll implement some auth-related navigation that conditionally renders the correct links and buttons based on the user's authentication state: + +- when the user's logged out, we should see **Sign Up** and **Sign In** +- when the user's logged in, we should see **Log Out** + +Let's start by [generating a navigation component](../cli-commands.md#generate-component): + +```bash +yarn redwood generate component navigation +``` + +This creates `web/src/components/Navigation/Navigation.js`. In that file, let's import [the `Link` component and the `routes` object](../router.md#link-and-named-route-functions) from `@redwoodjs/router`. 
+We'll also import [`useAuth`](../authentication.md#api) since we'll need to subscribe to the auth state for our component to decide what to render: + +```jsx title="web/src/components/Navigation/Navigation.js" +import { Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' + +const Navigation = () => { + return +} + +export default Navigation +``` + +Let's destructure `isAuthenticated` from the `useAuth` hook and use it in some conditionals: + +```jsx {5,8-12} title="web/src/components/Navigation/Navigation.js" +import { Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' + +const Navigation = () => { + const { isAuthenticated } = useAuth() + return ( + + ) +} + +export default Navigation +``` + +Because Redwood Auth uses [React's Context API](https://reactjs.org/docs/context.html) to manage and broadcast the auth state, we can be confident that `isAuthenticated` will always be up-to-date, even if it changes from within another component in the tree (so long as it's a child of ``). In our case, when `isAuthenticated` changes, React will auto-magically take care of rendering the appropriate components. + +Now let's import our sign out button and add it, as well as sign in and sign up links, to the appropriate blocks in the conditional: + +```jsx {3,9-16} title="web/src/components/Navigation/Navigation.[js/tsx]" +import { Link, routes } from '@redwoodjs/router' +import { useAuth } from '@redwoodjs/auth' +import SignoutBtn from 'src/components/SignoutBtn/SignoutBtn' + +const Navigation = () => { + const { isAuthenticated } = useAuth() + return ( + + ) +} + +export default Navigation +``` + +We have a working navigation component, but we still need to render it somewhere. Let's [generate a layout](../cli-commands.md#generate-layout) called GlobalLayout: + +```bash +yarn redwood generate layout global +``` + +Then import and render the navigation component in the newly-generated `web/src/layouts/GlobalLayout/GlobalLayout`: + +```jsx title="web/src/layouts/GlobalLayout/GlobalLayout.js" +import Navigation from 'src/components/Navigation/Navigation' + +const GlobalLayout = ({ children }) => { + return ( + <> +
+      <header>
+        <Navigation />
+      </header>
+      <main>{children}</main>
+    </>
    + + ) +} + +export default GlobalLayout +``` + +Finally, we'll wrap each of our generated pages in this `GlobalLayout` component. To do this efficiently, we'll update the routes defined in our `web\src\Routes.[js/tsx]` file with the [`Set` component](../router.md#sets-of-routes): + +```jsx title="web/src/Routes.[js/tsx]" +import { Router, Route, Set } from '@redwoodjs/router' +import GlobalLayout from 'src/layouts/GlobalLayout/GlobalLayout' + +const Routes = () => { + return ( + + + + + + + + + ) +} + +export default Routes +``` + +Now we have navigation that renders the correct links and buttons based on our auth state. When the user signs in, they'll see a **Sign Out** button. When the user signs out, they'll see **Sign Up** and **Sign In** links. + +## Wrapping Up + +We've configured Supabase GoTrue Auth with Redwood Auth, created a Sign Up page, a Sign In page, and a Sign Out button, and added auth links to our layout. Nicely done! + +As you continue refining your app, the following resources may come in handy: + +- [Redwood Supabase Auth Installation & Setup](../authentication.md#supabase) +- [Redwood Auth Playground](https://redwood-playground-auth.netlify.app/supabase) +- [Redwood Supabase Auth Client Implementation](https://github.com/redwoodjs/redwood/blob/main/packages/auth/src/authClients/supabase.ts) +- [Supabase GoTrue client implementation](https://github.com/supabase/gotrue-js/blob/d7b334a4283027c65814aa81715ffead262f0bfa/src/GoTrueClient.ts) + +Finally, keep the following features in mind (future how to's could go deep into any of these): + +- Authentication state changes can be observed via an event listener. The [Supabase Auth playground](https://github.com/redwoodjs/playground-auth/blob/main/web/src/lib/code-samples/supabase.md) shows an example. +- Authentication options include... + - Passwordless (enter email and get a magic confirmation link) + - Third party (via GitHub, Google, etc) + - Phone one-time password + - Sign in with refresh token (JWTs are a critical part of the auth implementation) + +Thanks for tuning in! + +> If you spot an error or have trouble completing any part of this recipe, please feel free to open an issue on [Github](https://github.com/redwoodjs/redwoodjs.com) or create a topic on our [community forum](https://community.redwoodjs.com/). diff --git a/docs/versioned_docs/version-7.0/how-to/test-in-github-actions.md b/docs/versioned_docs/version-7.0/how-to/test-in-github-actions.md new file mode 100644 index 000000000000..d69484779906 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/test-in-github-actions.md @@ -0,0 +1,385 @@ +# Testing Redwood in GitHub actions + +A good testing strategy is important for any project. Redwood offers a few different types of tests that you can write to make your app more robust—to ship with confidence. In this guide we'll focus on how to run your Redwood tests in GitHub Actions, so you can test your app on every push or pull request. + +We'll set up a tiny project with a few tests and a Postgres database that'll be created and used in every test run on GitHub. If you need to set up tests for an existing project, or if you want to write better tests, check out the (amazing) [Testing](../testing) docs. + +## Background + +Let's start by introducing some concepts and products that we'll use in this guide. Then we'll get to the code. + +### Continuous Integration + +Continuous Integration (CI) is the practice of automatically running your tests on every push or pull request. 
This is a great way to catch bugs before they're merged into your main branch. + +### Continuous Deployment + +Continuous Deployment (CD) is the practice of automatically deploying your app (and database in this case) to a server after every successful test run. This is a great way to make sure your app or database is always up to date. + +### GitHub Actions and GitHub Secrets + +GitHub Actions is a service that allows you to run a series of commands on a virtual machine. You can use it to run tests, deploy your app, or do anything else you may think of. It's free for public repositories and has a free tier for private ones. For more information, check out [GitHub Actions' docs](https://docs.GitHub.com/en/actions). + +GitHub Secrets is a way to store sensitive information like API keys or passwords needed by GitHub Actions. They are encrypted and only exposed to the GitHub Actions service. You can use them to pass sensitive information to your tests or deploy script. For more information, check out [GitHub Secrets' docs](https://docs.GitHub.com/en/actions/security-guides/encrypted-secrets#creating-encrypted-secrets-for-a-repository). + +## How to run tests in GitHub Actions + +All right, let's get to the code. In this how to, we'll focus on how to run your tests in GitHub Actions, but not how to write your tests (see the [Testing](../testing.md) doc for that). + +If you already have a project, you can skip to [4. Set up GitHub Actions](#4-set-up-GitHub-actions). + +### 1. Create a Redwood app + +Start by creating a Redwood app and `cd`ing into it: + +```sh +yarn create redwood-app rw-testing-gh-actions +cd rw-testing-gh-actions +``` + +Then make sure everything is working: + +```sh +yarn rw test +``` + +If it is, you should see something like this: + +```sh +... + + PASS api api/src/directives/requireAuth/requireAuth.test.ts + PASS api api/src/directives/skipAuth/skipAuth.test.ts + +Test Suites: 2 passed, 2 total +Tests: 3 passed, 3 total +Snapshots: 0 total +Time: 1.669 s +Ran all test suites. + +Watch Usage: Press w to show more. +``` + +### 2. Modify the Prisma schema + +For the purpose of this how to, we'll use the `UserExample` model that comes with the Redwood app. +We'll also change the database to Postgres since that's what we'll be using in our GitHub Actions. + +:::note Make sure you have a Postgres instance ready to use + +Here's a handy guide for how to [set it up locally](../local-postgres-setup). We'll need the connection string so our Redwood app knows where to store the data + +::: + +On to the changes. Modify your `schema.prisma` file to look like this: + +```graphql title="api/db/prisma.schema" +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + binaryTargets = "native" +} + +model UserExample { + id Int @id @default(autoincrement()) + email String @unique + name String? +} +``` + +Then add your connection strings to your `.env` file: + +:::warning + +Make sure you don't commit this file to your repo since it contains sensitive information. + +::: + +```bash +DATABASE_URL=postgres://postgres:postgres@localhost:54322/postgres +TEST_DATABASE_URL=postgres://postgres:postgres@localhost:54322/postgres +``` + +You need one connection string for your development database and one for your test database. Read more about it in the testing doc's [The Test Database](../testing#the-test-database) section. + +Next, navigate to the `scripts/seed.ts` file. 
Uncomment the contents of the array that contains the "fake" users. We'll also use the `createMany` method for inserting records in the database so we can skip the duplicates (see the [Prisma docs](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#createmany) for more info). When you're all done, it should look like this: + +```ts title="scripts/seed.ts" + ... + + const data: Prisma.UserExampleCreateArgs['data'][] = [ + // To try this example data with the UserExample model in schema.prisma, + // uncomment the lines below and run 'yarn rw prisma migrate dev' + // + { name: 'alice', email: 'alice@example.com' }, + { name: 'mark', email: 'mark@example.com' }, + { name: 'jackie', email: 'jackie@example.com' }, + { name: 'bob', email: 'bob@example.com' }, + ] + console.log( + "\nUsing the default './scripts/seed.{js,ts}' template\nEdit the file to add seed data\n" + ) + + // Note: if using PostgreSQL, using `createMany` to insert multiple records is much faster + // @see: https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#createmany + Promise.all( + // + // Change to match your data model and seeding needs + // + data.map(async (data: Prisma.UserExampleCreateArgs['data']) => { + const record = await db.userExample.createMany({ + data, + skipDuplicates: true, + }) + console.log(record) + }) + ) + + ... +``` + +Finally, migrate your database: + +```sh +yarn rw prisma migrate dev --name init +``` + +### 3. Generate the UserExample scaffold + +We need some real tests to work with. Scaffolding out the `UserExample` model gives us everything we need to create "users" in our app, including some of the services tests which interact with our test database: + +```sh +yarn rw g scaffold UserExample +``` + +Make sure everything is still working: + +```sh +yarn rw test +``` + +You should see something like this: + +```sh + PASS web web/src/lib/formatters.test.tsx + PASS api api/src/services/userExamples/userExamples.test.ts + +Test Suites: 2 passed, 2 total +Tests: 21 passed, 21 total +Snapshots: 0 total +Time: 3.587 s +Ran all test suites related to changed files in 2 projects. +``` + +### 4. Set up GitHub Actions + +Create a new file in the `.github/workflows` directory (create those directories if they don't exist) called `ci.yml` and add the following: + +:::note + +This action only runs when the `main` branch is updated, but you can configure it to run on any other branch. 
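+
+For example, to also run it for a hypothetical `develop` branch, you could widen the `branches` filters in the workflow below:
+
+```yml
+on:
+  push:
+    branches: ['main', 'develop']
+  pull_request:
+    branches: ['main', 'develop']
+```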
+ +::: + +```yml title=".github/workflows/ci.yml" +name: Redwood CI + +on: + push: + branches: ['main'] + pull_request: + branches: ['main'] + +env: + DATABASE_URL: postgres://postgres:postgres@localhost:5432/postgres + TEST_DATABASE_URL: postgres://postgres:postgres@localhost:5432/postgres + +jobs: + build: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [20.x] + # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ + + services: + # Label used to access the service container + postgres: + # Docker Hub image + image: postgres + # Provide the password for postgres + env: + POSTGRES_PASSWORD: postgres + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps tcp port 5432 on service container to the host + - 5432:5432 + + steps: + - uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + # install all the dependencies + - run: yarn install + # build the redwood app + - run: yarn rw build + # run the api tests + - run: yarn rw test api --no-watch + # run the web tests + - run: yarn rw test web --no-watch +``` + +Now push your changes to the `main` branch on GitHub; the "Redwood CI" action we just made will run like this: + +ci-results-1 + +1. Set up the job ("build") +2. Initialize the containers and create the postgres instance +3. Checkout the code +4. Set up Node.js +5. Install the Redwood app's dependencies +6. Build the Redwood app +7. Run the api tests +8. Run the web tests +9. Clean up the environment + +At this point, if all is well, you may start feeling the joy of automated tests! You push a commit, the Action runs, your tests pass, and you get a green checkmark. To savor this moment, consider updating one of your unit tests, making it fail. Push again. Watch it fail. Fix it. Push again. Watch it pass. Repeat and enjoy. + +### 5. Set up CI on pull requests only + +We want tests to run on every pull request so we can make sure that our code is working as expected. +Update the `ci.yml` file by removing the `push` event. The first lines should look like this: + +```yml title=".github/workflows/ci.yml" +name: Redwood CI for Pull Requests + +on: + pull_request: + branches: ['main'] + +... +``` + +Now, if you open or push to a pull request, this action will run and you'll see something like this: + +ci-pr-1 + +Once the action is done running, you can see the results in the "Conversation" tab: + +ci-pr-2 + +### 6. Deploy the database changes to an actual database + +Now for the CD—we want to use another action to deploy the database changes to an actual database, so we can automatically deploy the latest and greatest to a real environment. In this action we'll run the tests one more time against the local database, then deploy the database migrations to the external database. 
+ +Create a new file in the `.github/workflows` directory called `cd.yml` and add the following: + +```yml title=".github/workflows/cd.yml" +name: Redwood CD for database deployment + +on: + push: + branches: ['main'] + +env: + DATABASE_URL: postgres://postgres:postgres@localhost:5432/postgres + TEST_DATABASE_URL: postgres://postgres:postgres@localhost:5432/postgres + +jobs: + build: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [20.x] + # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ + + services: + # Label used to access the service container + postgres: + # Docker Hub image + image: postgres + # Provide the password for postgres + env: + POSTGRES_PASSWORD: postgres + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps tcp port 5432 on service container to the host + - 5432:5432 + + steps: + - uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + # install all the dependencies + - run: yarn install + # build the redwood app + - run: yarn rw build + # run the api tests + - run: yarn rw test api --no-watch + # run the web tests + - run: yarn rw test web --no-watch + # run migrations on the actual database + - run: yarn rw prisma migrate deploy + # run seed script in the actual db + - run: yarn rw prisma db seed +``` + +The main changes are: + +- We only run the action on push events to the `main` branch +- We run the migrations and seed scripts after the tests + +### 7. Set up GitHub Secrets + +Because you're using an actual Postgres instance in your action, you need to set up the secrets for the database connection so that the username and password stay secret. + +Go to the "Settings" tab in your GitHub repo and click "Secrets", then "Actions", then "New repository secret". +In the name field, type `DATABASE_URL`. In the value field, put the actual secret—something like this: + +``` +postgres://[USER_NAME]:[PASSWORD]@[HOST]:[PORT]/postgres +``` + +When you're done, click "Add secret". This creates a new secret that you can use in your GitHub Actions. In this case, it species the connection string for the database we'll deploy changes to. + +You can use the secret in your GitHub Actions by using the `${{ secrets.DATABASE_URL }}` syntax: + +```yml +env: + DATABASE_URL: ${{ secrets.DATABASE_URL }} +``` + +Now you can merge the PR and the database changes will be tested, then deployed to the actual database. + +What's next? It is up to you—refine and streamline! + +As you consider automating your project workflows, keep the following wise philosophical observation in mind... + +
+> Civilization advances by extending the number of important operations we can perform without thinking.
+>
+> —Alfred North Whitehead
    diff --git a/docs/versioned_docs/version-7.0/how-to/using-a-third-party-api.md b/docs/versioned_docs/version-7.0/how-to/using-a-third-party-api.md new file mode 100644 index 000000000000..54db73a24da7 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/using-a-third-party-api.md @@ -0,0 +1,546 @@ +# Using a Third Party API + +The time will come when you'll need data from a source you don't own. This how to will present the scenario of accessing a third party's API from a Redwood app. We'll show an example of accessing an API from both the client side and the server side. + +We're going to build a simple weather app that will display the current weather in the user's zip code (we'll assume only zip codes in the United States of America to keep the example code as simple as possible). To do this we'll get the current weather from the [OpenWeather API](https://openweathermap.org/) and display it on the only page of our app, the homepage. The final app could look something like this (if you apply a little more styling on top of the basic version we'll build): + +![image](https://user-images.githubusercontent.com/300/79395970-af551280-7f2f-11ea-9b8c-870fc2bfdd36.png) + +> If you just want to skip to the code, you can get the repo for both the client and server implementation here: https://github.com/redwoodjs/cookbook-third-party-apis You will still need a valid API key from OpenWeather, so don't skip the **Setup** steps below! + +## Setup + +You'll need to [create a free account](https://home.openweathermap.org/users/sign_up) on OpenWeather to get an API key. You'll be able to make 1,000 calls per day, which is more than enough for our sample app (with enough left over that you can release this as a private weather station for your family and friends). + +Once you've created your account and verified your email address, go to the API keys tab and copy your default key: + +![image](https://user-images.githubusercontent.com/300/79375024-d0f0d280-7f0c-11ea-81a8-364659755efa.png) + +(That's not a real key so don't even think about trying to steal it!) + +For some reason it can take up to 30 minutes for OpenWeather to enable your API key, so while we're waiting for them let's see what a sample API call will return: https://samples.openweathermap.org/data/2.5/weather?zip=94040,us&appid=439d4b804bc8187953eb36d2a8c26a02 + +```json +{ + "coord": { + "lon": -122.09, + "lat": 37.39 + }, + "weather": [ + { + "id": 500, + "main": "Rain", + "description": "light rain", + "icon": "10d" + } + ], + "base": "stations", + "main": { + "temp": 280.44, + "pressure": 1017, + "humidity": 61, + "temp_min": 279.15, + "temp_max": 281.15 + }, + "visibility": 12874, + "wind": { + "speed": 8.2, + "deg": 340, + "gust": 11.3 + }, + "clouds": { + "all": 1 + }, + "dt": 1519061700, + "sys": { + "type": 1, + "id": 392, + "message": 0.0027, + "country": "US", + "sunrise": 1519051894, + "sunset": 1519091585 + }, + "id": 0, + "name": "Mountain View", + "cod": 200 +} +``` + +Good ol' faithful JSON. Let's see, what can we use here to display on our site? How about the `name` of the city that the zip is in, the `main.temp` (listed here in Kelvin, so we'll need to [convert](https://www.google.com/search?q=297+kelvin+to+fahrenheit&oq=297+kelvin+to+fahrenheit)) and then under the `weather` key we have an array with a `main` that lists the current conditions in english. How about that `icon`? Turns out OpenWeather has some we can use! 
Just take the icon code and use it in a URL like http://openweathermap.org/img/wn/10d@2x.png + +![rain icon](https://user-images.githubusercontent.com/300/79376259-c33c4c80-7f0e-11ea-8285-701375665451.png) + +If enough time has passed your real API key may be activated. You can try seeing the weather in the geographic center of the US (make sure to append your API key to the end of this URL): https://api.openweathermap.org/data/2.5/weather?zip=66952,us&appid= + +If it's still not ready let's start working on the app and hopefully it will be by the time we're done. You can always use the sample URL and forever see the unchanging weather in Mountain View, California. + +## Create the App + +We'll start our app the way we start all Redwood apps: + +```bash +yarn create redwood-app weatherstation +cd weatherstation +yarn rw dev +``` + +That will open a browser to http://localhost:8910. Let's create a landing page: + +```bash +yarn rw generate page home / +``` + +> If you like typing you can use the full command `yarn redwood generate page home /` + +The browser should have refreshed with a message about where to find our new homepage, `web/src/pages/HomePage/HomePage.js`. Let's open that up and create a form so the user can actually enter their zip code: + +```jsx title="web/src/pages/HomePage/HomePage.js" +import { Form, TextField, Submit } from '@redwoodjs/forms' + +const HomePage = () => { + const onSubmit = (data) => { + console.info(data) + } + + return ( +
    + + Go + + ) +} + +export default HomePage +``` + +This gives us a very simple form and some validation that the user is entering a 5 digit zip code. If you open your Web Inspector and click **Go** you should see the zip code appear in the console: + +![console output](https://user-images.githubusercontent.com/300/79378210-c8e76180-7f11-11ea-949d-2bacae483559.png) + +Now let's talk to the API and get some data for real. We can do that in one of two ways: + +1. Have the client (React app running in the browser) talk to the API directly +2. Have our own server (or serverless function, in the case of Redwood) talk to the API, and have the client talk to *our* server. + +We'll build out an example of both types of integration below. + +## Client-side API Integration + +For the first version of our client-side integration let's access the API directly on the client. What are the pros on cons? + +**Pros** + +* Simplest design: no server design/build needed +* Fewest network calls: one! +* Fast: calling directly to the API + +**Cons** + +* Insecure: users could inspect the page source and get our API key +* No throttling: someone could write a bot to hit the page thousands of times a second + +You'll need to balance these risks in a real-world app so choose carefully! + +### Fetching the weather data + +We've got the zip code in our `onSubmit` handler so it makes sense to simply make the API call from there and then do something with the result. We'll use the browser's built in [Fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) since it does exactly what we need. For now let's just dump the result to the console (be sure to use your actual API key): + +```jsx title="web/src/pages/HomePage/HomePage.js" +const onSubmit = (data) => { + fetch('https://api.openweathermap.org/data/2.5/weather?zip=66952,us&appid=YOUR_API_KEY') + .then(response => response.json()) + .then(json => console.info(json)) +} +``` + +![image](https://user-images.githubusercontent.com/300/79379271-858df280-7f13-11ea-97f0-5020f875170d.png) + +> If it turns out your API key still isn't ready, you'd think you could just replace the URL in the fetch with the sample response endpoint instead, but this causes a CORS error. At this point you'll just need to wait for your API key to start working! + +Well that was easy! We have the zip code hardcoded into that URL so let's replace that with the actual value from our text box: + +```jsx title="web/src/pages/HomePage/HomePage.js" +const onSubmit = (data) => { + fetch(`https://api.openweathermap.org/data/2.5/weather?zip=${data.zip},us&appid=YOUR_API_KEY`) + .then(response => response.json()) + .then(json => console.info(json)) +} +``` + +### Showing the weather on the page + +We're getting our data just fine but now we need to update the page with the weather. Let's use state to keep track of the result and trigger a refresh in the UI (don't forget the new fragment `<> ` around the form and weather output): + +```jsx title="web/src/pages/HomePage/HomePage.js" +import { useState } from 'react' +import { Form, TextField, Submit } from '@redwoodjs/forms' + +const HomePage = () => { + const [weather, setWeather] = useState() + + const onSubmit = (data) => { + fetch( + `https://api.openweathermap.org/data/2.5/weather?zip=${data.zip},us&appid=YOUR_API_KEY` + ) + .then((response) => response.json()) + .then((json) => setWeather(json)) + } + + return ( + <> +
    + + Go + + {weather && JSON.stringify(weather)} + + ) +} + +export default HomePage +``` + +That should give us a simple text dump of the JSON: + +![image](https://user-images.githubusercontent.com/300/79381373-bae80f80-7f16-11ea-9159-dd08e6ac7ade.png) + +Finally, let's output the actual weather data along with a couple of helper functions to format the output: + +```jsx title="web/src/pages/HomePage/HomePage.js" +import { useState } from 'react' +import { Form, TextField, Submit } from '@redwoodjs/forms' + +const HomePage = () => { + const [weather, setWeather] = useState() + + const onSubmit = (data) => { + fetch( + `https://api.openweathermap.org/data/2.5/weather?zip=${data.zip},us&appid=YOUR_API_KEY` + ) + .then((response) => response.json()) + .then((json) => setWeather(json)) + } + + const temp = () => Math.round(((weather.main.temp - 273.15) * 9) / 5 + 32) + + const condition = () => weather.weather[0].main + + const icon = () => { + return `http://openweathermap.org/img/wn/${weather.weather[0].icon}@2x.png` + } + + return ( + <> +
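+      {/* same form as before; once the fetch resolves we format and render the weather below */}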
+      <Form onSubmit={onSubmit}>
+        <TextField
+          name="zip"
+          placeholder="Zip code"
+          maxLength="5"
+          validation={{ required: true, pattern: /^\d{5}$/ }}
+        />
+        <Submit>Go</Submit>
+      </Form>
+      {weather && (
+        <div>
+          <h1>{weather.name}</h1>
+          <h2>
+            <img src={icon()} alt={condition()} />
+            <span>
+              {temp()}°F and {condition()}
+            </span>
+          </h2>
+        </div>
    + )} + + ) +} + +export default HomePage +``` + +![image](https://user-images.githubusercontent.com/300/79381535-fbe02400-7f16-11ea-87f8-119bdb121765.png) + +It's not pretty, but it works! We'll leave the styling to you! + +> You can see the final code, with styling, here: https://github.com/redwoodjs/cookbook-third-party-apis/blob/main/web/src/pages/ClientPage/ClientPage.js + +## Server-side API Integration + +If you weighed the pros and cons presented earlier and found too many cons on the client-side implementation, then it looks like we're making our call on the server. To do that we'll need to do two things + +1. Provide a way for the client to talk to our server(less function) +2. A way for our server(less function) to talk to the third party API + +Redwood comes with GraphQL integration built in so that seems like a logical way to get our client talking to our serverless function. Let's create a GraphQL SDL (to define the API interface for the client) and a service (to actually implement the logic of talking to the third-party API). + +> **Doesn't Redwood have a generator for this?** +> +> Redwood does have an SDL generator, but it assumes you have a model defined in `api/db/schema.prisma` and so creates the SDL you need to access that data structure. If you're creating a custom one you're on your own! + +### The GraphQL API + +We can create whatever data structure we want so let's take this opportunity to strip out the data we don't care about coming from OpenWeather and just return the good stuff: + +```javascript title="api/src/graphql/weather.sdl.js" +export const schema = gql` + type Weather { + zip: String! + city: String! + conditions: String! + temp: Int! + icon: String! + } + + type Query { + getWeather(zip: String!): Weather! @skipAuth + } +` +``` + +This data structure returns just the data we care about, and we can even pre-format it on the server (convert Kelvin to Fahrenheit and get the icon URL). We have a Query type `getWeather` that accepts the zip code (note that it's a `String` because it could start with a `0`) and returns our `Weather` type defined above. + +### The Service + +That's it for our client-to-server API interface! Now let's define the GraphQL resolver that will actually get the data from OpenWeather. We'll take it one step at a time and first make sure we can access our new GraphQL endpoint. We'll define the `getWeather` function to just return some dummy data in the format we require. + +In Redwood GraphQL Query types are automatically mapped to functions exported from a service with the same name, so we'll create a `weather.js` service and name the function `getWeather`: + +```javascript title="api/src/services/weather/weather.js" +export const getWeather = ({ zip }) => { + return { + zip, + city: 'City', + conditions: 'Hot Lava', + temp: 1000, + icon: 'https://placekitten.com/100/100', + } +} +``` + +How can we test this out? Redwood ships with a GraphQL playground that you can use to access your API! Open a browser tab to http://localhost:8911/graphql + +![image](https://user-images.githubusercontent.com/300/79391348-3dc49680-7f26-11ea-8d94-8567ae287fa6.png) + +We'll enter our query at the top left and the variables (zip) at the lower left. Click the huge "Play" button in the middle of the screen and you should see the result of our query: + +![image](https://user-images.githubusercontent.com/300/79395014-9cd9d980-7f2d-11ea-83b1-45aaa8506706.png) + +Okay lets pull the real data from OpenWeather now. 
We'll use a package `@whatwg-node/fetch` that mimics the Fetch API in the browser: + +```bash +yarn workspace api add @whatwg-node/fetch +``` + +And import that into the service and make the fetch. Note that `fetch` returns a Promise so we're going to convert our service to `async`/`await` to simplify things: + +```javascript title="api/src/services/weather/weather.js" +import { fetch } from '@whatwg-node/fetch' + +export const getWeather = async ({ zip }) => { + const response = await fetch( + `http://api.openweathermap.org/data/2.5/weather?zip=${zip},US&appid=YOUR_API_KEY` + ) + const json = await response.json() + + return { + zip, + city: json.name, + conditions: json.weather[0].main, + temp: Math.round(((json.main.temp - 273.15) * 9) / 5 + 32), + icon: `http://openweathermap.org/img/wn/${json.weather[0].icon}@2x.png` + } +} +``` + +If you click "Play" in the GraphQL playground we should see the real data from the API: + +![image](https://user-images.githubusercontent.com/300/79607107-8ce60500-80a7-11ea-8b1d-fe1cd3e1d3dd.png) + +### Displaying the weather + +All that's left now is to display it in the client! Since we're getting data from our GraphQL API we can use a Redwood Cell to simplify all the work that goes around writing API access, displaying a loading state, etc. We can use a generator to get the shell of our Cell: + +```bash +yarn rw generate cell weather +``` + +This will create `web/src/components/WeatherCell/WeatherCell.js`: + +```jsx title="web/src/components/WeatherCell/WeatherCell.js" +export const QUERY = gql` + query FindWeatherQuery($id: Int!) { + weather: weather(id: $id) { + id + } + } +` + +export const Loading = () =>
<div>Loading...</div>
+
+export const Empty = () => <div>Empty</div>
+
+export const Failure = ({ error }) => (
+  <div style={{ color: 'red' }}>Error: {error.message}</div>
+)
+
+export const Success = ({ weather }) => {
+  return <div>{JSON.stringify(weather)}</div>
    +} +``` + +Let's update the QUERY to match the signature of our API: + +```jsx +export const QUERY = gql` + query GetWeatherQuery($zip: String!) { + weather: getWeather(zip: $zip) { + zip + city + conditions + temp + icon + } + } +` +``` + +Note the `weather: getWeather` part. This will actually call the API endpoint `getWeather` but the response will be renamed to `weather` and then given to the `Success` component. + +Let's leave the display as-is for now to make sure this is working. We'll use the `WeatherCell` in our `HomePage` and introduce some state to keep track of when the zip is submitted: + +```jsx title="web/src/pages/HomePage/HomePage.js" +import { Form, TextField, Submit } from '@redwoodjs/forms' +import { useState } from 'react' +import WeatherCell from 'src/components/WeatherCell' + +const HomePage = () => { + const [zip, setZip] = useState() + + const onSubmit = (data) => { + setZip(data.zip) + } + + return ( + <> +
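+      {/* once a zip is submitted, WeatherCell runs the GraphQL query and renders the result */}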
    + + Go + + {zip && } + + ) +} + +export default HomePage +``` + +If your copy/paste-fu is strong you should get a dump of the JSON from the GraphQL call: + +![image](https://user-images.githubusercontent.com/300/79393218-bb3dd600-7f29-11ea-9b3a-3f2bbd854ed8.png) + +Now all that's left is to format everything a little nicer. How about a little something like this in `WeatherCell`: + +```jsx title="web/src/components/WeatherCell/WeatherCell.js" +export const Success = ({ weather }) => { + return ( +
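+    // weather arrives pre-formatted from our service: city, conditions, temp (already in °F), and icon URL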
+    <div>
+      <h1>{weather.city}</h1>
+      <h2>
+        <img src={weather.icon} alt={weather.conditions} />
+        <span>
+          {weather.temp}°F and {weather.conditions}
+        </span>
+      </h2>
+    </div>
    + ) +} +``` + +![image](https://user-images.githubusercontent.com/300/79393411-2091c700-7f2a-11ea-8760-8938d55b1ef5.png) + +### Extra Credit! Invalid zip codes? + +What if the user inputs an invalid zip code, like **11111**? + +![image](https://user-images.githubusercontent.com/2321110/137649805-5a9f6f4d-4f66-4758-9e47-f1a8a985bdda.png) + +Gross. This happens when our service tries to parse the response from OpenWeather and can't find one of the data points we're looking for (the array under the `weather` key). We should put together a nicer error message than that. Let's look at the response from OpenWeather when you enter a zip code that doesn't exist: https://api.openweathermap.org/data/2.5/weather?zip=11111,us&appid=YOUR_API_KEY + +```json +{ + "cod": "404", + "message": "city not found" +} +``` + +Okay, let's look for that `cod` and if it's `404` then we know the zip isn't found and can return a more helpful error from our service. Open up the service and let's add a check: + +```javascript {2, 10-12} title="api/src/services/weather/weather.js" +import { fetch } from '@whatwg-node/fetch' +import { UserInputError } from '@redwoodjs/graphql-server' + +export const getWeather = async ({ zip }) => { + const response = await fetch( + `http://api.openweathermap.org/data/2.5/weather?zip=${zip},US&appid=YOUR_API_KEY` + ) + const json = await response.json() + + if (json.cod === '404') { + throw new UserInputError(`${zip} isn't a valid US zip code, please try again`) + } + + return { + zip, + city: json.name, + conditions: json.weather[0].main, + temp: Math.round(((json.main.temp - 273.15) * 9) / 5 + 32), + icon: `http://openweathermap.org/img/wn/${json.weather[0].icon}@2x.png`, + } +} +``` + +And now if we submit **11111**: + +![image](https://user-images.githubusercontent.com/2321110/137649849-49d3aa66-e08b-44f8-93b9-c8a61f1e5ce9.png) + +That's much better! Let's strip out that "Error: " part, and maybe make it look a little more error-like. This is a job for the `Failure` component in our `WeatherCell`: + +```jsx title="web/src/components/WeatherCell/WeatherCell.js" +export const Failure = ({ error }) => ( + + {error.message} + +) +``` + +![image](https://user-images.githubusercontent.com/2321110/137649934-35c7b0e1-9b10-409e-8dbb-6a133aeb14bd.png) + +Much better! + +## Conclusion + +We hope this has given you enough confidence to go out and capture data from some of the amazing APIs of the Information Superhighway and get it (them?) into your Redwood app! + +Picking up any new framework from scratch is a daunting task and even those of us that wrote this one made more than a few trips to Google! If you think we can improve on this recipe, or any other, open an [issue](https://github.com/redwoodjs/redwoodjs.com/issues) or a [pull request](https://github.com/redwoodjs/redwoodjs.com/pulls). diff --git a/docs/versioned_docs/version-7.0/how-to/using-nvm.md b/docs/versioned_docs/version-7.0/how-to/using-nvm.md new file mode 100644 index 000000000000..05ed1f716bca --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/using-nvm.md @@ -0,0 +1,119 @@ +# Using nvm + +## What is nvm? + +[nvm](https://github.com/nvm-sh/nvm) is a Node Version Manager. It's perfect for running multiple versions of Node.js on the same machine. + +## Installing nvm + +:::warning +If you've already installed Node.js on your machine, uninstall Node.js before installing nvm. This will prevent any conflicts between the Node.js and nvm. 
+ +### If you're on a Mac +You can uninstall by running the following command in your terminal: + +```bash +brew uninstall --force node +``` + +Once that's finished, run the following command to remove unused folders and dependencies: +```bash +brew cleanup +``` + +### If you're on Windows + +- Go to the start menu, search and go to **Settings** +- Click on the **Apps** section +- In the search box under **Apps & Features** section, search for **Nodejs** +- Click on **Nodejs** and click on **Uninstall** +- We recommend restarting your machine, even if you're not prompted to do so +::: + +### If you're on a Mac +You can install `nvm` using [Homebrew](https://brew.sh/): + +```bash +brew install nvm +``` + +### If you're on Windows +Reference the [nvm-windows](https://github.com/coreybutler/nvm-windows) repo. + +- Download the [latest installer](https://github.com/coreybutler/nvm-windows/releases) (nvm-setup.zip) +- Locate your zip file (should be in your downloads or wherever you've configured your downloads to be saved) and unzip/extract its contents +- Now, you should have a file called **nvm-setup.exe**. Double click on it to run the installer. +- Follow the instructions in the installer + +:::info +We have a specific doc for [Windows Development Setup.](/docs/how-to/windows-development-setup) +::: + +## How to use nvm + +To confirm that `nvm` was installed correctly, run the following command in your terminal: + +```bash +nvm --version +``` + +You should see the version number of `nvm` printed to your terminal. + +### To install the latest version of Node.js + +```bash +nvm install latest +``` + +### To install a specific version of Node.js + +```bash +nvm install +``` + +To see all the versions of Node that you can install, run the following command: + +```bash +nvm ls-remote +``` + +:::warning +You'll need to [install yarn](https://yarnpkg.com/getting-started/install) **for each version of Node that you install.** + +[Corepack](https://nodejs.org/dist/latest/docs/api/corepack.html) is included with all Node.js >=16.10 installs, but you must opt-in. To enable it, run the following command: + +```bash +corepack enable +``` + +We also have a doc specifically for [working with yarn](./using-yarn). +::: + +### To use a specific version of Node.js + +```bash +nvm use +``` + +:::info +Remember: [Redwood has specific Node.js version requirements.](../tutorial/chapter1/prerequisites.md#nodejs-and-yarn-versions) +::: + +### To see all the versions of Node.js that you have installed + +```bash +nvm ls +``` + +### To set the default version of Node.js + +```bash +nvm alias default <> +``` + +### To uninstall a specific version of Node.js + +```bash +nvm uninstall <> +``` + diff --git a/docs/versioned_docs/version-7.0/how-to/using-yarn.md b/docs/versioned_docs/version-7.0/how-to/using-yarn.md new file mode 100644 index 000000000000..5198209e6e85 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/using-yarn.md @@ -0,0 +1,72 @@ +# Using Yarn + +## What is Yarn? + +[Yarn](https://yarnpkg.com/) is a package manager for JavaScript. It is used to manage and install dependencies for JavaScript projects, particularly for Node.js applications. Yarn offers features like parallel package installations and offline caching and uses a `yarn.lock` file to control and reproduce consistent installations of dependencies across different environments. 
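+
+With [Corepack](https://nodejs.org/dist/latest/docs/api/corepack.html) (covered below), a project can also pin the exact Yarn release it expects via the `packageManager` field in its root `package.json`. Here's a minimal sketch, with an illustrative version number:
+
+```json
+{
+  "packageManager": "yarn@4.1.0"
+}
+```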
+ +## Installing yarn + +> "The preferred way to manage Yarn is through [Corepack](https://nodejs.org/dist/latest/docs/api/corepack.html), a new binary shipped with all Node.js releases starting from 16.10."
    -[from the Yarn documentation](https://yarnpkg.com/getting-started/install) + +Corepack is included with all Node.js >=16.10 installs, but you must opt-in. To enable it, run the following command: + +```bash +corepack enable +``` + +## Using the correct version of yarn + +To see the version of yarn that you have installed, run the following command: + +```bash +yarn --version +``` + +**Redwood requires Yarn (>=1.22.21)** + +You can upgrade yarn by running the following command: + +```bash +corepack prepare yarn@stable --activate +``` + +:::info +If this command fails, you may need to [uninstall the current version of Yarn first](#uninstalling-yarn). + +```terminal +corepack disable +npm uninstall -g yarn --force +corepack enable +``` +::: + +## Installing packages and dependencies with yarn + +You'll need to run `yarn install` in the root of your project directory to install all the necessary packages and dependencies for your project. + +Redwood separates the backend (`api`) and frontend (`web`) concerns into their own paths in the codebase. ([Yarn refers to these as "workspaces"](https://yarnpkg.com/features/workspaces). In Redwood, we refer to them as "sides.") When you add packages going forward you'll need to specify which workspace they should go in. + +For example to install a package on the `web` or **frontend** side, you would run the following command: + +```bash +yarn workspace web add package-name +``` + +and to install a package on the `api` or **backend** side, you would run the following command: + +```bash +yarn workspace api add package-name +``` + +## Uninstalling yarn + +To uninstall yarn, run the following command: + +```bash +corepack disable +npm uninstall -g yarn --force +``` + +## Additional Information + +For additional information, you can refer directly to the [yarn documentation](https://yarnpkg.com/). diff --git a/docs/versioned_docs/version-7.0/how-to/windows-development-setup.md b/docs/versioned_docs/version-7.0/how-to/windows-development-setup.md new file mode 100644 index 000000000000..adddcef24c63 --- /dev/null +++ b/docs/versioned_docs/version-7.0/how-to/windows-development-setup.md @@ -0,0 +1,62 @@ +# Windows Development Setup + +This guide provides a simple setup to start developing a RedwoodJS project on Windows. Many setup options exist, but this aims to make getting started as easy as possible. This is the recommended setup unless you have experience with some other shell, like PowerShell. + +> If you're interested in using the Windows Subsystem for Linux instead, there is a [community guide for that](https://community.redwoodjs.com/t/windows-subsystem-for-linux-setup/2439). + +### Git Bash + +Download the latest release of [**Git for Windows**](https://git-scm.com/download/win) and install it. +When installing Git, you can add the icon on the Desktop and add Git Bash profile to Windows Terminal if you use it, but it is optional. + +![1-git_components.png](https://user-images.githubusercontent.com/18013532/146685298-b12ed1a5-fe99-4286-ab12-69cf0a7be139.png) + +Next, set VS Code as Git default editor (or pick any other editor you're comfortable with). + +![2-git_editor.png](https://user-images.githubusercontent.com/18013532/146685299-0e067554-a5a8-46b9-91ac-ffcd6f738b80.png) + +For all other steps, we recommended keeping the default choices. 
+ +### Node.js environment (and npm) + +We recommend you install the latest `nvm-setup.zip` of [**nvm-windows**](https://github.com/coreybutler/nvm-windows/releases) to manage multiple version installations of Node.js. When the installation of nvm is complete, run Git Bash as administrator to install Node with npm. + +![3-git_run_as_admin.png](https://user-images.githubusercontent.com/18013532/146685300-1762a00a-26cb-4f8b-b480-c6aba4e26b89.png) + +Redwood uses the LTS version of Node. To install, run the following commands inside the terminal: + +```bash +$ nvm install lts --latest-npm +// installs latest LTS and npm; e.g. 16.13.1 for the following examples +$ nvm use 16.13.1 +``` + +### Yarn + +Now you have both Node and npm installed! Redwood also uses yarn, which you can now install using npm: + +```bash + npm install -g yarn +``` + +*Example of Node.js, npm, and Yarn installation steps* + +![4-install_yarn.png](https://user-images.githubusercontent.com/18013532/146685297-b361ebea-7229-4d8c-bc90-472773d06816.png) + +## Congrats! + +You now have everything ready to build your Redwood app. + +Next, you should start the amazing [**Redwood Tutorial**](tutorial/chapter1/installation.md) to learn how to use the framework. + +Or run `yarn create redwood-app myApp` to get started with a new project. + +## Troubleshooting + +### Beware case-insensitivity + +On Windows Git Bash, `cd myapp` and `cd myApp` will select the same directory because Windows is case-insensitive. But make sure you type the original capitalization to avoid strange errors in your Redwood project. + +### Microsoft Visual C++ Redistributable + +If your machine doesn't have Microsoft Visual C++ Redistributable, then you need to install it from [here](https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#visual-studio-2015-2017-2019-and-2022). diff --git a/docs/versioned_docs/version-7.0/intro-to-servers.md b/docs/versioned_docs/version-7.0/intro-to-servers.md new file mode 100644 index 000000000000..121097e67e6f --- /dev/null +++ b/docs/versioned_docs/version-7.0/intro-to-servers.md @@ -0,0 +1,474 @@ +--- +description: How to get started connecting to and deploying to a real server +--- + +# Introduction to Servers + +If you're looking at deploying to a real, physical server using something like the [Baremetal](/docs/deploy/baremetal) deploy option, you're going to need to get familiar with connecting to remote servers. On *nix-based systems (but also Windows after installing something like Powershell) this is handled by the [SSH](https://en.wikipedia.org/wiki/Secure_Shell) utility. In this doc we'll learn about the various strategies that SSH can use to connect: + +* Username & password +* Private key +* Public key + +## Terminology + +Let's define a few terms so we're on the same page going forward: + +* SSH - Secure Shell Protocol (where'd the P go in the acronym?) is the protocol used by the `ssh` command line utility we'll be using throughout this doc +* `ssh` - when shown in code font like this it's referring to the actual command line utility, rather than the all-encompassing "SSH" concept +* `ssh-agent` - another utility that keeps track of public and private keys and makes them available for use by the `ssh` utility + +## First Connect + +Before we can do anything else, we want to make sure that we can remotely connect to our server via SSH manually, as *nix folks have been doing for hundreds of years using SSH. 
Depending on how the server is configured, you'll connect with either a username and password, a private key known to the server, or a public key known to the server. We'll look at each one below. + +A requirement of all of these authentication methods is that you know the username of the account you're connecting to. You'll need to get this from your hosting/cloud provider, and it could be pretty different depending on who your host is: + +* If you use AWS and create an EC2 instance from an Ubuntu image, the user will be `ubuntu` +* If you use Amazon's own Linux image, it'll be `ec2user` +* If you create a Digital Ocean Droplet the user will be `root` + +Et cetera. Whatever it is, you'll need to know that before connecting. + +But first, a note about a yes/no prompt you'll see the first time using any of these connection methods... + +## Fingerprint Prompt + +Using any of the auth methods below will lead to the following prompt the first time you connect, and it's because you've never connected to that server before: + +``` +The authenticity of host '192.168.0.122 (192.168.0.122)' can't be established. +ED25519 key fingerprint is SHA256:FHQDzxsqA68c+BhLPUkyN8aAVrznDtekhPg/99JXk8Q. +This key is not known by any other names +Are you sure you want to continue connecting (yes/no/[fingerprint])? +``` + +This is a quick security check making sure that you're actually connecting to the computer that you think you are. You can be reasonably sure that it is, so just type "yes". You'll get a note letting you know that it's been added to the list of known hosts (and you won't be prompted when connecting to this server again): + +``` +Warning: Permanently added '192.168.0.122' (ED25519) to the list of known hosts. +``` + +You can see a list of all known servers by looking in this file: + +``` +~/.ssh/known_hosts +``` + +:::info Reusing IP addresses + +If you're connecting to cloud-based servers, turning them on and off, and potentially reusing IP addresses, you'll get an error message the next time you try to connect to that IP (because the signature of the server itself is now different than what's recorded in `known_hosts`. Find that line and delete it from `~/.ssh/known_hosts` and you'll be able to connect again. + +::: + +Once you're past that prompt you'll then either be prompted for your password, or logged in automatically (when using a private or public key). Let's look at each one in detail. + +:::warning Baremetal First Deploy Woes? + +If you're having trouble deploying to your server with Baremetal, and you've never connected to your server manually via SSH, this could be why: Baremetal provides no interactive prompt to accept this server fingerprint. You need to connect manually at least once before Baremetal can connect. + +::: + +### Username/Password + +Using username/password auth is pretty straight forward, just denote the user and server you want to connect to, either by its domain name or IP address. You will have created the password at the time you started your server, or maybe the host generated a random one. + +``` +ssh user@server.com +``` + +As a real example, here's how you would connect to a Digital Ocean Droplet. The user is `root` and the IP is `192.168.0.122`: + +``` +ssh root@192.168.0.122 +``` + +You will be prompted to enter the server's password, and your keystrokes are hidden which for some reason makes typing a password exponentially harder: + +``` +root@192.168.0.122's password: +``` + +You'll get three tries to get the password correct. 
+ +Whether or not you connected successfully, skip ahead to the [Connected](#connected) section. + +### Private Key + +Some providers, like AWS, will give you a private key at the time the server is created, rather than a password. This file usually ends in `.pem`. Make sure you know where you put this file on your computer because, for now, it's the only way you'll be able to connect to your server. If you lose it, you'll need to terminate that instance and start a new one. I generally put them in the `~/.ssh` folder to keep all SSH-related stuff together, usually in a subdirectory. (I also move this directory to iCloud and then create a symlink back to `~/.ssh` so that it's synchronized across all of my systems.) + +:::info More About Public/Private Keypairs + +Learn more about [public/private key authentication](https://www.ssh.com/academy/ssh/public-key-authentication). But the gist is that you create two keys, one public and one private. Either one can encrypt a document, but, only the private key can *decrypt* it. This means that anyone can have the public key and it can be freely distrubted (thus the "public" name), and the owner of the private key can always verify that it was encrypted using the related public key. A related technique can happen in reverse: the private key can be used to create a signature of a document, and the public key can be used to *verify* that the signature was created by the matching private key. So you can get the original message, and after verifying the signature, trust that it was sent by the owner of the private key. + +You can't *decrypt* something with the public key that was encrypted with the private key, however. That would defeat the purpose of sharing the public key: anyone could read your message! If you need two-way encryption of messages then both parties could share their public keys and each would encrypt using the other party's public key. + +::: + +If you try connecting using that private key now, you'll most likely get a big scary message: + +``` +ssh -i ~/.ssh/keyname.pem ubuntu@192.168.0.122 + +@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ +@ WARNING: UNPROTECTED PRIVATE KEY FILE! @ +@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ +Permissions 0666 for '/Users/rob/.ssh/shared/algostake.pem' are too open. +It is required that your private key files are NOT accessible by others. +This private key will be ignored. +``` + +This is because files downloaded from the internet are given permissions that make them readable by anyone on the system. SSH doesn't like this. It wants you, and only you, to have access to this file. So we need to make it readable and writeable by only your user. + +Permission changes are handled by the `chmod` command. The [octal version](https://chmodcommand.com/chmod-600/) of the permissions you want to set is the number `600` (which sets read/write permissions for the owner of file, and nothing for anyone else). To change the permission of the file: + +``` +chmod 600 ~/.ssh/keyname.pem +``` + +Where `keyname` is whatever the actual name of the file is. Once you do this you're ready to use it to connect. You still need the username and address of the server, but we're also going to set the `-i` flag which instructs SSH to use a private key at a given path: + +``` +ssh -i ~/.ssh/keyname.pem ubuntu@192.168.0.122 +``` + +Skip ahead to the [Connected](#connected) section. 
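+
+If you'd rather not pass `-i` on every connection, you can record the key in your local `~/.ssh/config`. Here's a minimal sketch, where the host alias and key filename are placeholders for your own:
+
+```
+Host my-server
+  HostName 192.168.0.122
+  User ubuntu
+  IdentityFile ~/.ssh/keyname.pem
+```
+
+With that entry in place, `ssh my-server` picks up the right key automatically.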
+ +### Public Key + +Some providers, like Digital Ocean, give you the opportunity to put your public key on the server automatically when it's created. This lets you avoid password and private key authentication completely, and is actually the preferred method of connecting via SSH that we'll end up with at [the end](#adding-your-ssh-public-key-to-the-server) of this guide! + +If you don't really know what a public key is, where yours is at, or what it means to put it on the server, then skip ahead to the [Creating a Public Key](#creating-a-public-key) section. + +You'll need your username and server address and that's it: + +``` +ssh ubuntu@192.168.0.122 +``` + +If you have a public key, but your server doesn't let you pre-load it onto the server, you'll need to do it manually. See [Adding Your SSH Public Key to the Server](#adding-your-ssh-public-key-to-the-server) below. + +## Connected + +Whatever auth method you go with, you should now be connected! If you get an error message that looks like this: + +``` +ubuntu@192.168.0.122: Permission denied (publickey,password). +``` + +It could be one of several things: + +* The username is wrong +* The password is wrong +* The public key your system is trying to connect with is not found on the server in its `~/.ssh/authorized_keys` file +* The private key you passed with the `-i` flag is not found on the server + +If you run the command again with the `-v` flag (verbose) you'll see everything that SSH is trying when it tries to log in. There are lots of resources on the internet to help you [troubleshoot](https://docs.digitalocean.com/support/how-to-troubleshoot-ssh-connectivity-issues/). + +Assuming you did not get an error, you should be logged in: + +``` +Welcome to Ubuntu 22.04 LTS (GNU/Linux 5.15.0-41-generic x86_64) + + * Documentation: https://help.ubuntu.com + * Management: https://landscape.canonical.com + * Support: https://ubuntu.com/advantage + + System information as of Wed Aug 3 21:09:41 UTC 2022 + + System load: 0.25439453125 Users logged in: 0 + Usage of /: 6.3% of 24.05GB IPv4 address for eth0: 192.168.0.122 + Memory usage: 22% IPv4 address for eth0: 10.48.0.5 + Swap usage: 0% IPv4 address for eth1: 10.124.0.2 + Processes: 97 + +0 updates can be applied immediately. + +The programs included with the Ubuntu system are free software; +the exact distribution terms for each program are described in the +individual files in /usr/share/doc/*/copyright. + +Ubuntu comes with ABSOLUTELY NO WARRANTY, to the extent permitted by +applicable law. + +root@remote-server:~# +``` + +This is the welcome message on an Ubuntu machine, yours may be different. + +The last line is actually your prompt where you can start typing commands. + +## Disconnecting + +Disconnecting from the server is just a simple `exit` command. You can also just close your terminal window and the connection will be terminated: + +```bash +root@remote-server:~# exit + +Connection to 192.168.0.122 closed. +``` + +## Simplifying Access + +Having to type your username or include a private key each time is not fun. Luckily SSH uses public/private key cryptography, and can verify your identity using your public key. You've probably uploaded your public key to GitHub in the **Settings > SSH and GPG keys** section. We'll do something similar here: put our local machine's public key on the server so that it knows it's us when we connect, and skip the password. 
+ +:::info + +If you're already using [public key auth](#public-key) then you can probably skip this section—you're already doing it! + +::: + +You can have multiple public keys from multiple development machines on the server so you can connect from multiple computers. This comes in very handy when working on a team: when someone leaves you just remove their public key from the server. Contrast this with password authentication, where you either need to share the password to a single deploy user to all of your teammates, and then change the password when someone leaves, or give everyone a copy of the server's private key and change *that* every time someone leaves. Just adding their public keys is much simpler to manage. + +### Public/Private Keypairs + +You may already have a public/private keypair! Check in `~/.ssh` and look for two files with the same name before the extension, one with `.pub` on the end (`id_ed25519` and `id_ed25519.pub`, for example). If you don't remember actually putting these files in the directory, then they were probably generated by a program like `ssh-keygen`, and SSH is already using them! + +To see which of your keys SSH is already aware of, you can run this command to list them: + +``` +ssh-add -L +``` + +You should get zero or more lines containing public SSH keys, something like this: + +``` +ssh-ed25519 AAAAB3NzaC1yc2EAAAADAQABCU90x/khqD1sDW= rob@computer.local +``` + +If I compare that to the content of my `~/.ssh/id_ed25519.pub` file I can see that they match! Great, so SSH is already using our public key when it tries to connect. But what if you don't have a public/private keypair? + +### Generating a Public/Private Keypair + +There's a simple command to generate a new keypair: + +``` +ssh-keygen -t ed25519 +``` + +This tells the program to generate a key using the ED25519 algorithm. There are [many algorithms](https://goteleport.com/blog/comparing-ssh-keys/) available, but not all of them are supported everywhere. The linked article goes into depth into the various algorithms and their pros and cons. + +You will be prompted for a couple of questions: + +``` +Generating public/private ed25519 key pair. +Enter file in which to save the key (/Users/rob/.ssh/id_ed25519): +Enter passphrase (empty for no passphrase): +Enter same passphrase again: +``` + +If you don't have any keys, go ahead and use the default name `id_ed25519` by just hitting ENTER. + +A Passphrase is an additional line of security on your key. However, it also adds some inconvenience around using your public key: you'll need to enter the passpharse each time your private key is accessed. Which is great for security, but kind of defeats the purpose of sharing your public key with the server to make access easier. As long as you protect your private key, you shouldn't need to worry about adding a passphrase. Press ENTER (twice) to create your keypair without a passphrase. + +``` +Your identification has been saved in id_ed25519 +Your public key has been saved in id_ed25519.pub +The key fingerprint is: +SHA256:6Qg7RQRGp1AtfVIOucEt1HtZWkYMU1LZYBVwBsXwTWQ rob@computer.local +The key's randomart image is: ++--[ED25519 256]--+ +| .B&@O+ .E +==| +| o=*= .** . o .o| +| . o . . . .| +| .o o . o ..| +| o . A * + .| +| = + = + | +| o . * . | +| . o | +| | ++----[SHA256]-----+ +``` + +:::info What's this randomart thing? + +From this [Super User answer](https://superuser.com/a/22541): + +> Validation is normally done by a comparison of meaningless strings (i.e. 
the hexadecimal representation of the key fingerprint), which humans are pretty slow and inaccurate at comparing. Randomart replaces this with structured images that are faster and easier to compare. + +I suppose the idea is that if humans ever needed to compare public keys they could use the randomart version and know pretty quickly whether they're the same (instead of comparing a bunch of random number and letters by eye!) + +::: + +### Adding to ssh-agent + +Our key exists but does SSH know to use it yet? Let's ask `ssh-agent` (the tool that manages keys and makes them available to the actual `ssh` process): + +``` +ssh-add -L +``` + +Do you see your new public key listed? If not, we just have to let `ssh-agent` know where it is and to start using it (note that you give the path to the private key): + +``` +ssh-add ~/.ssh/id_ed25519 +``` + +Now running `ssh-add -L` should list our key. + +:::info Missing key after computer restart + +I've had cases where my key was unknown to `ssh-agent` after a computer restart. I added the following to the `~/.zshrc` file on my computer (not the server) so that the key is added every time I start a new terminal session: + +``` +ssh-add ~/.ssh/id_ed25519 +``` + +::: + +### Adding Your SSH Public Key to the Server + +So SSH is now presenting the key to the server, but the server doesn't know what to do with it. We'll now copy our *public* key to the server so that it allows connections from it. Write your public key to the terminal so that you can copy it: + +``` +cat ~/.ssh/id_ed25519.pub +``` + +:::info + +On MacOS you can copy the key into your clipboard with this two-part command: + +``` +cat ~/.ssh/id_ed25519.pub | pbcopy +``` + +::: + +Now, connect to the server with ssh as usual (using your username/password or private key) and then open up the `~/.ssh/authorized_keys` file for editing. The `nano` editor is usually built in and is simple to use, but `vi` is another choice (if you can figure out how to exit): + +``` +nano ~/.ssh/authorized_keys +``` + +Now just paste your key into this file on a new line. It helps to add a comment above so you know which computer this key is from, maybe with the person's name and the hostname of their system. As you upgrade computers or give coworkers access to this machine you'll quickly lose track of which keys are which if you don't label them: + +``` +# Rob Cameron (optimus-prime) +ssh-ed25519 AAAAB3NzaC1yc2EAAAADAQABCU90x/khqD1sDW= rob@computer.local +``` + +Save the file and exit. Now, disconnect from the SSH session with `exit` and reconnect, but this time you shouldn't need a password or private key (if you were using `-i` you can leave that off) and simply connect with: + +``` +ssh root@192.168.0.122 +``` + +And you should be in! + +## SSH Agent Forwarding + +When connecting to a remote server, it would be nice if you could also SSH into other machines and have them identify you as *you*, on your personal computer, not as the server itself. By default this doesn't happen: making an SSH connection from your remote server uses the credentials on the server itself, meaning you'd have to go through all of the steps above to now treat the remote server as the client as whatever server *that* server wants to connect to as the host, allowing you to connect with your public key. Ugh. + +Luckily SSH has a mechanism that supports this: SSH Agent Forwarding. 
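+ +Enabling it is usually just a matter of passing the `-A` flag when you connect (`ssh -A root@192.168.0.122`), or setting it per-host in your local `~/.ssh/config`. Here's a sketch of the config approach, using the example IP from above: + +``` +# ~/.ssh/config on your local machine +Host 192.168.0.122 + ForwardAgent yes +```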
+ +This is most useful when trying to deploy a codebase from GitHub to your remote server: you're already connected to the remote server as you, and you're already authorized to connect to GitHub, so just use those credentials. You can verify if this is already working for you: + +``` +ssh -T git@github.com +``` + +If you get a message like this: + +``` +Hi cannikin! You've successfully authenticated, but GitHub does not provide shell access. +Connection to github.com closed. +``` + +Then agent forwarding is already enabled! GitHub recognized you as your username and gave you access. The remote server forwarded on your public key (the same one that was used to connect to your remote server) and everything just worked. + +If instead you see this message: + +``` +git@github.com: Permission denied (publickey). +``` + +Then agent forwarding is not enabled. In this case we recommend this excellent guide from GitHub which walks you through enabling it: https://docs.github.com/en/developers/overview/using-ssh-agent-forwarding + +## Deploy Keys + +You may not want to use your own personal SSH keys during deploy time. One downside of Agent Forwarding is that it requires that you personally (or a deploy system acting on your behalf) SSH into a machine to perform deploys, rather than letting a CI/CD system do them. Another is security: presumably your SSH keys allow full access to your repos, which is more than the read-only access needed for a deploy. + +For these reasons you may want to consider using **deploy keys**. The idea is that you generate a public/private keypair that's unique to the server(s), and then let GitHub know about the public key. Now the server(s) can connect to GitHub and clone your codebase without you being involved. And you can lock down access to that key to a single repo with read-only access. + +GitHub has a great guide for adding deploy keys to your account: https://docs.github.com/en/developers/overview/managing-deploy-keys#deploy-keys + +## Customizing the Prompt + +When deploying an app to production it can be very helpful to get a reminder of what server you're connected to, rather than seeing an IP address or random hostname at the prompt: + +``` +root@remote-server:~# +# or +user@192.168.0.122 +``` + +Is that production? Staging? Which server in the cluster? Luckily you can customize this prompt pretty easily. I like to use the app name, the environment, and a simple integer for the server ID (if it's in a cluster). So if my app is called "ruby" and it's the first server in the production environment cluster, I like to see my prompt as: + +``` +root@ruby-prod1:~# +``` + +This prompt is usually specified in one of these files: + +``` +~/.bashrc +~/.bash_profile +~/.zshrc +~/.zprofile +``` + +If you use a shell other than `bash` or `zsh` the files are going to be named differently, but the idea is the same. Open the file in `nano` or `vi` and look for a line that starts with `PS1=` (you may see a couple of lines like this): + +```bash +if [ "$color_prompt" = yes ]; then + PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w \$\[\033[00m\] ' +else + PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ ' +fi +``` + +In the config snippet above, the first `PS1` is used for color prompts and the second is for black and white. You'll want to change them both. + +Within all of that gobbledygook you should see a few special escape characters: `\u`, `\h` and `\w`. These are the **user**, **hostname** and **working directory**.
You may have all or only some of these present. The rest of the characters, like `[\033[00m\]` are color codes, which we can ignore for now. + +For our simple case, just replace the `\h` with the string we want to show for the hostname: + +```bash +if [ "$color_prompt" = yes ]; then + PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@ruby-prod1\[\033[00m\]:\[\033[01;34m\]\w \$\[\033[00m\] ' +else + PS1='${debian_chroot:+($debian_chroot)}\u@ruby-prod1:\w\$ ' +fi +``` + +Now save the file, and run `source` to load up the variables into the current session: + +``` +source ~/.bashrc +``` + +You should see your prompt change to your new custom hostname! Now whenever you connect to your server you'll be sure not to run `rm -rf *` in the wrong environment. + +If you want to get real fancy with your prompt, there are some [PS1 generators](https://ezprompt.net) out there that let you create the string containing all kinds of fancy stuff, and easily customize the colors. + +## Aliases for Even Easier Connections + +Seeing `ruby-prod1` helps keep track of which server we're on, but wouldn't it be great if you could just type that as a command and connect automatically? You can! + +In your local computer's `.zshrc`, `.bash_profile`, or whichever file your shell uses, add a line like the following: + +``` +alias ruby-prod1='ssh root@192.168.0.122' +``` + +Then run `source ~/.zshrc` to load it. Now you should be able to connect by just using the name of the server, and skip the SSH command altogether: + +``` +ruby-prod1 +``` + +It doesn't get much easier than that! + +## What's Next? + +You should now be ready to get to the next step(s) using the [Baremetal](/docs/deploy/baremetal) deploy! Baremetal does the same thing you're doing manually (SSHing into the remote server and running commands), so if you can connect to your server manually then Baremetal should be able to as well. diff --git a/docs/versioned_docs/version-7.0/introduction.md b/docs/versioned_docs/version-7.0/introduction.md new file mode 100644 index 000000000000..7a89099ca8d1 --- /dev/null +++ b/docs/versioned_docs/version-7.0/introduction.md @@ -0,0 +1,60 @@ +--- +description: Redwood is the full-stack web framework designed to help you grow from side project to startup +--- + +# Introduction + +Redwood is the full-stack web framework designed to help you grow from side project to startup. +Redwood features an end-to-end development workflow that weaves together the best parts of [React](https://reactjs.org/), [GraphQL](https://graphql.org/), [Prisma](https://www.prisma.io/), [TypeScript](https://www.typescriptlang.org/), [Jest](https://jestjs.io/), and [Storybook](https://storybook.js.org/). +For full inspiration and vision, see Redwood's [README](https://github.com/redwoodjs/redwood/blob/main/README.md). + +Development on Redwood happens in the [redwoodjs/redwood repo on GitHub](https://github.com/redwoodjs/redwood). +The docs are [there too](https://github.com/redwoodjs/redwood/tree/main/docs). +While Redwood's [leadership and maintainers](https://github.com/redwoodjs/redwood#core-team-leadership) +handle most of the high-priority items and the day-to-day, Redwood wouldn't be +where it is without [all its contributors](https://github.com/redwoodjs/redwood#all-contributors)! +Feel free to reach out to us on the [forums](https://community.redwoodjs.com) or on [Discord](https://discord.gg/redwoodjs), and follow us on [Twitter](https://twitter.com/redwoodjs) for updates.
+ +## Getting the Most out of Redwood + +To get the most out of Redwood, do two things: + +- [Start the tutorial](tutorial/foreword.md) +- [Join the community](https://redwoodjs.com/community) + +The tutorial is the best way to start your Redwood adventure. +It's readable, feature-ful, and fun. +You'll go all the way from `git clone` to Netlify deploy! +And by the end, you should feel comfortable enough to start that side project. + +After you've read the tutorial and started your side project, come say hi and tell us all about it by joining the community. +Redwood wouldn't be where it is without the people who use and contribute to it. +We warmly welcome you! + +## How these Docs are Organized + +As you can probably tell from the sidebar, Redwood's docs are organized into three sections: + +- [Tutorial](tutorial/foreword.md) +- [Reference](index) +- [How To](how-to/index) + +The order isn't arbitrary. +This is more or less the learning journey we have in mind for you. + +While we expect you to read the tutorial from top to bottom (maybe even more than once?), we of course don't expect you to read the Reference and How To sections that way. +The content in those sections is there on an as-needed basis. +You need to know about the Router? Check out the [Router](router.md) reference. +You need to upload files? Check out the [File Uploads](how-to/file-uploads.md) how to. + +That said, there are some references you should consider reading at some point in your Redwood learning journey. +Especially if you want to become an advanced user. +For example, [Services](services.md) are fundamental to Redwood. +It's worth getting to know them inside and out. +And if you're not writing [tests](testing.md) and [stories](storybook.md), you're not using Redwood to its full potential. + +> **We realize that the content doesn't always match the organization** +> +> For example, half the [Testing](testing.md) reference reads like a tutorial, and half the [Logger](logger.md) reference read like a how to. +> Till now, we've focused on coverage, making sure we had content on all of Redwood's feature somewhere at least. +> We'll shift our focus to organization and pay more attention to how we can curate the experience. diff --git a/docs/versioned_docs/version-7.0/local-postgres-setup.md b/docs/versioned_docs/version-7.0/local-postgres-setup.md new file mode 100644 index 000000000000..5facde1d7929 --- /dev/null +++ b/docs/versioned_docs/version-7.0/local-postgres-setup.md @@ -0,0 +1,166 @@ +--- +description: Setup a Postgres database to develop locally +--- + +# Local Postgres Setup + +RedwoodJS uses a SQLite database by default. While SQLite makes local development easy, you're +likely going to want to run the same database you use in production locally at some point. And since the odds of that database being Postgres are high, here's how to set up Postgres. + +## Install Postgres +### Mac +If you're on a Mac, we recommend using Homebrew: + +```bash +brew install postgresql@14 +``` + +> **Install Postgres? I've messed up my Postgres installation so many times, I wish I could just uninstall everything and start over!** +> +> We've been there before. For those of you on a Mac, [this video](https://www.youtube.com/watch?v=1aybOgni7lI) is a great resource on how to wipe the various Postgres installs off your machine so you can get back to a blank slate. +> Obviously, warning! This resource will teach you how to wipe the various Postgres installs off your machine. Please only do it if you know you can! 
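+ +Once Homebrew finishes, you can sanity-check the install before moving on (the exact version will vary, and if the command isn't found, `brew info postgresql@14` shows any PATH tweak needed for keg-only installs): + +```bash +psql --version +# psql (PostgreSQL) 14.x +```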
+ +### Windows and Other Platforms +If you're using another platform, see Prisma's [Data Guide](https://www.prisma.io/docs/guides/database-workflows/setting-up-a-database/postgresql) for detailed instructions on how to get up and running. + +## Creating a database + +If everything went well, then Postgres should be running and you should have a few commands at your disposal (namely, `psql`, `createdb`, and `dropdb`). + +Check that Postgres is running with `brew services` (the `$(whoami)` bit in the code block below is just where your username should appear): + +```bash +$ brew services +Name Status User Plist +postgresql started $(whoami) /Users/$(whoami)/Library/LaunchAgents/homebrew.mxcl.postgresql.plist +``` + +If it's not started, start it with: + +```bash +brew services start postgresql +``` + +Great. Now let's try running the PostgreSQL interactive terminal, `psql`: + +```bash +$ psql +``` + +You'll probably get an error like: + +```bash +psql: error: FATAL: database $(whoami) does not exist +``` + +This is because `psql` tries to log you into a database of the same name as your user. But if you just installed Postgres, odds are that database doesn't exist. + +Luckily it's super easy to create one using another of the commands you got, `createdb`: + +```bash +$ createdb $(whoami) +``` + +Now try: + +``` +$ psql +psql (13.1) +Type "help" for help. + +$(whoami)=# +``` + +If it worked, you should see a prompt like the one above—your username followed by `=#`. You're in the PostgreSQL interactive terminal! While we won't get into `psql`, here are a few of the commands you should know: + +- `\q` — quit (super important!) +- `\l` — list databases +- `\?` — get a list of commands + +If you'd rather not follow any of the advice here and instead create another Postgres user (rather than a Postgres database), follow [this](https://www.digitalocean.com/community/tutorials/how-to-install-and-use-postgresql-on-ubuntu-18-04#step-3-%E2%80%94-creating-a-new-role). + +## Update the Prisma Schema + +Tell Prisma to use a Postgres database instead of SQLite by updating the `provider` attribute in your +`schema.prisma` file: + +```graphql title="api/db/schema.prisma" +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} +``` +> Note: If you run into a "PrismaClientInitializationError" then you may need to regenerate the prisma client using: `yarn rw prisma generate` + +## Connect to Postgres + +Add a `DATABASE_URL` to your `.env` file with the URL of the database you'd like to use locally. The +following example uses `redwoodblog_dev` for the database. It also has `postgres` set up as a +superuser for ease of use. +```env +DATABASE_URL="postgresql://postgres@localhost:5432/redwoodblog_dev?connection_limit=1" +``` + +Note the `connection_limit` parameter. This is [recommended by Prisma](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-client/deployment#recommended-connection-limit) when working with +relational databases in a Serverless context. You should also append this parameter to your production +`DATABASE_URL` when configuring your deployments. + +### Local Test DB +You should also set up a test database similarly by adding `TEST_DATABASE_URL` to your `.env` file.
+```env +TEST_DATABASE_URL="postgresql://postgres@localhost:5432/redwoodblog_test?connection_limit=1" +``` + +> Note: local postgres server will need manual start/stop -- this is not handled automatically by RW CLI in a manner similar to sqlite + +### Base URL and path + +Here is an example of the structure of the base URL and the path using placeholder values in uppercase letters: +```bash +postgresql://USER:PASSWORD@HOST:PORT/DATABASE +``` +The following components make up the base URL of your database, they are always required: + +| Name | Placeholder | Description | +| ------ | ------ | ------| +| Host | `HOST`| IP address/domain of your database server, e.g. `localhost` | +| Port | `PORT` | Port on which your database server is running, e.g. `5432` | +| User | `USER` | Name of your database user, e.g. `postgres` | +| Password | `PASSWORD` | password of your database user | +| Database | `DATABASE` | Name of the database you want to use, e.g. `redwoodblog_dev` | + +## Migrations +Migrations are snapshots of your DB structure, which, when applied, manage the structure of both your local development DB and your production DB. + +To create and apply a migration to the Postgres database specified in your `.env`, run the _migrate_ command. (Did this return an error? If so, see "Migrate from SQLite..." below.): +```bash +yarn redwood prisma migrate dev +``` + +### Migrate from SQLite to Postgres +If you've already created migrations using SQLite, e.g. you have a migrations directory at `api/db/migrations`, follow this two-step process. + +#### 1. Remove existing migrations +**For Linux and Mac OS** +From your project root directory, run either command corresponding to your OS. +```bash +rm -rf api/db/migrations +``` + +**For Windows OS** +```bash +rmdir /s api\db\migrations +``` + +> Note: depending on your project configuration, your migrations may instead be located in `api/prisma/migrations` + +#### 2. Create a new migration +Run this command to create and apply a new migration to your local Postgres DB: +```bash +yarn redwood prisma migrate dev +``` + +## DB Management Tools +Here are our recommendations in case you need a tool to manage your databases: +- [TablePlus](https://tableplus.com/) (Mac, Windows) +- [Beekeeper Studio](https://www.beekeeperstudio.io/) (Linux, Mac, Windows - Open Source) diff --git a/docs/versioned_docs/version-7.0/logger.md b/docs/versioned_docs/version-7.0/logger.md new file mode 100644 index 000000000000..e93ad213d7ee --- /dev/null +++ b/docs/versioned_docs/version-7.0/logger.md @@ -0,0 +1,788 @@ +--- +title: Logging +description: Use the Logger to observe your application +--- + +# Logger + +RedwoodJS provides an opinionated logger with sensible, practical defaults that grants you visibility into the applications while you're developing and after you have deployed. + +Logging in the serverless ecosystem is not trivial and neither is its configuration. Redwood aims to make this easier. 
+ +When choosing a Node.js logger to add to the framework, RedwoodJS required that it: + +- Have a low-overhead, and be fast +- Output helpful, readable information in development +- Be highly configurable to set log levels, time formatting, and more +- Support key redaction to prevent passwords or tokens from leaking out +- Save to a file in local (or other) environments that can write to the file system +- Stream to third-party log and application monitoring services vital to production logging in serverless environments like [LogFlare](https://logflare.app/), [Datadog](https://www.datadoghq.com/) or [LogDNA](https://www.logdna.com/) +- Hook into [Prisma logging](https://www.prisma.io/docs/concepts/components/prisma-client/working-with-prismaclient/logging) to give visibility into connection issues, slow queries, and any unexpected errors +- Have a solid Developer experience (DX) to get logging out-of-the-gate quickly +- Use a compact configuration to set how to log (its `options`) and where to log -- file, stdout, or remote transport stream -- (its `destination`) + +With those criteria in mind, Redwood includes [pino](https://github.com/pinojs/pino) with its rich [features](https://github.com/pinojs/pino/blob/master/docs/api.md), [ecosystem](https://github.com/pinojs/pino/blob/master/docs/ecosystem.md) and [community](https://github.com/pinojs/pino/blob/master/docs/ecosystem.md#community). + +Plus ... pino means 🌲 pine tree! How perfect is that for RedwoodJS? + +Note: RedwoodJS logging is setup for its api side only. For browser and web side error reporting or exception handling, these features will be considered in future releases. + +## Quick Start + +To start 🌲🪓 api-side logging, just + +- import the logger in your service, function, or any other lib +- use `logger` with the level just as you might have with `console` + +```jsx title="api/lib/logger.ts" +import { createLogger } from '@redwoodjs/api/logger' + +/** + * Creates a logger. Options define how to log. Destination defines where to log. + * If no destination, std out. + */ +export const logger = createLogger({}) + +// then, in your api service, lib, or function +import { logger } from 'src/lib/logger' + +//... + +logger.trace(`>> items service -> About to save item ${item.name}`) +logger.info(`Saving item ${item.name}`) +logger.debug({ item }, `Item ${item.name} detail`) +logger.warn(item, `Item ${item.id} is missing a name`) +logger.warn({ missing: { name: item.name } }, `Item ${item.id} is missing values`) +logger.error(error, `Failed to save item`) +``` + +That's it! + +### Manual Setup for RedwoodJS Upgrade + +If you are upgrading an existing RedwoodJS app older than v0.28 and would like to include logging, you simply need to copy over files from the "Create Redwood Application" template: + +- Copy [`packages/create-redwood-app/template/api/src/lib/logger.ts`](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/template/api/src/lib/logger.ts) to `api/src/lib/logger.ts`. Required. + +For optional Prisma logging: + +- Copy [`packages/create-redwood-app/template/api/src/lib/db.ts`](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/template/api/src/lib/db.ts) and replace `api/src/lib/db.ts` (or `api/src/lib/db.js`). _Optional_. + +The first file `logger.ts` defines the logger instance. You will import `logger` and use in your services, functions or other libraries. You may then replace existing `console.log()` statements with `logger.info()` or `logger.debug()`. 
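+ +For example, a hypothetical swap might look something like this (the `item` value and message are just placeholders): + +```jsx +// before +console.log('Saving item', item) + +// after: import the logger once at the top of the file, +// then log structured data alongside the message +import { logger } from 'src/lib/logger' + +logger.info({ item }, `Saving item ${item.name}`) +```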
+ +The second `db.ts` replaces how the `db` Prisma client instance is declared and exported. It configures Prisma logging, if desired. See below for more information on Prisma logging options. + +## Options aka How to Log + +In addition to the rich [features](https://github.com/pinojs/pino/blob/master/docs/api.md) that [pino](https://github.com/pinojs/pino) offers, RedwoodJS has added some sensible, practical defaults to make the logger DX first-rate. + +### Log Level + +One of 'fatal', 'error', 'warn', 'info', 'debug', 'trace' or 'silent'. + +The logger detects your current environment and will default to a sensible minimum log level. + +> **_NOTE:_** In Development, the default is `trace` while in Production, the default is `warn`. +> This means that output in your dev server can be verbose, but when you deploy you won't miss out on critical issues. + +You can override the default log level via the `LOG_LEVEL` environment variable or the `level` LoggerOption. + +The 'silent' level disables logging. + +### Troubleshooting + +> If you are not seeing log output when deployed, consider setting the level to `info` or `debug`. + +```jsx +import { createLogger } from '@redwoodjs/api/logger' + +/** + * Creates a logger with RedwoodLoggerOptions + * + * These extend and override default LoggerOptions, + * can define a destination like a file or other supported pino log transport stream, + * and sets whether or not to show the logger configuration settings (defaults to false) + * + * @param RedwoodLoggerOptions + * + * RedwoodLoggerOptions have + * @param {options} LoggerOptions - defines how to log, such as redaction and format + * @param {string | DestinationStream} destination - defines where to log, such as a transport stream or file + * @param {boolean} showConfig - whether to display logger configuration on initialization + */ +export const logger = createLogger({ options: { level: 'info' } }) +``` + +Please refer to the [Pino options documentation](https://github.com/pinojs/pino/blob/master/docs/api.md#options) for a complete list. + +### Redaction + +Everyone has heard of reports that Company X logged emails, or passwords, to files or systems that may not have been secured. While RedwoodJS logging won't necessarily prevent that, it does provide you with the mechanism to ensure that it won't happen. + +To redact sensitive information, you can supply paths to keys that hold sensitive data using the [redact option](https://github.com/pinojs/pino/blob/master/docs/redaction.md). + +We've included a default set called the `redactionsList` that includes keys such as + +``` + 'access_token', + 'accessToken', + 'DATABASE_URL', + 'email', + 'event.headers.authorization', + 'host', + 'jwt', + 'JWT', + 'password', + 'params', + 'secret', +``` + +You may wish to augment these defaults via the `redact` configuration setting, here adding a Social Security Number and Credit Card Number key to the list. + +```jsx +/** + * Custom redaction list + */ +import { redactionsList } from '@redwoodjs/api/logger' + +//... + +export const logger = createLogger({ + options: { redact: [...redactionsList, 'ssn,credit_card_number'] }, +}) +``` + +Note: Unless you provide the current `redactionsList` with the defaults, just the keys `'ssn,credit_card_number'` will be redacted. 
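+ +With redaction in place, matching keys are replaced in the output rather than dropped. Since `email` is part of the default `redactionsList`, a statement like the following would produce output along these lines (shape abbreviated): + +```jsx +logger.info({ email: 'alice@example.com' }, 'User signed in') +// => {"level":30, ..., "email":"[Redacted]", "msg":"User signed in"} +```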
+ +### Log Formatter (formerly known as "Pretty Printing") + +> **_Important:_** As of version 0.41, "pretty printing" with pino is no longer supported due to pino having deprecated the `pino-pretty` package and the accepted practice of not pretty printing in production due to overhead and not being able to send these formatted logs to transports. + +No log is worth logging if you cannot read it. + +RedwoodJS provides a `LogFormatter` that adds color, emoji, time formatting and level reporting so you can quickly see what is going on. + +It is based on [pino-colada](https://github.com/lrlna/pino-colada/blob/master/README.md): a cute [ndjson](http://ndjson.org) formatter for [pino](https://github.com/pinojs/pino). + +#### Command + +The `LogFormatter` is distributed as a bin that can be invoke via the `yarn rw-log-formatter` command + +To pipe logs to the formatter: + +```bash +echo "{\"level\": 30, \"message\": \"Hello RedwoodJS\"}" | yarn rw-log-formatter +``` + +Output: + +```bash +11:00:28 🌲 Hello RedwoodJS +✨ Done in 0.14s. +``` + +#### Usage + +Log formatting is automatically setup in the `yarn rw dev` command. + +```bash +yarn rw dev +``` + +You may also pipe logs to the formatter when using `rw serve`: + +```bash +yarn rw serve | yarn rw-log-formatter +yarn rw serve api | yarn rw-log-formatter +``` + +> Note: Since `rw serve` sets the Node environment to `production` you will not see log non-warn/error output unless you configure your logging level to `debug` or below. + +You'll see that formatted output by default when you launch your RedwoodJS app using: + +```bash +yarn rw dev +``` + +### Examples + +The following examples and screenshots show how log formatting output may look in your development environment. + +Notice how the emoji help identify the level, such as 🐛 for `debug` and 🌲 for `info`. + +#### Basic + +Simple request and with basic GraphQL output. + +![Screen Shot 2021-12-22 at 1 41 46 PM](https://user-images.githubusercontent.com/1051633/147141091-ab27e5f0-4b90-4114-9452-c095df5e2516.png) + +#### With a Custom Payload + +Sometimes you will want to log a 🗒 Custom message or payload object that isn't one of the predefined `query` or `data` options. + +> In these examples, the `post` is a blog post with a `id`, `title`, `commentCount`, and `description`. + +You can use the `custom` option: + +```tsx +logger.debug({ custom: post.title }, 'The title of a Post') +``` + +Or, you can also log a custom object payload: + +```tsx +logger.debug( + { + custom: { + title: post.title, + comments: post.commentCount, + }, + }, + 'Post with count of comments' +) +``` + +Or, a more nested payload: + +```tsx +logger.debug( + { + custom: { + title: post.title, + details: { + id: post.id, + description: post.description, + comments: post.commentCount, + }, + }, + }, + 'Post details' +) +``` + +Or, an entire object: + +```tsx +logger.debug( + { + custom: post, + }, + 'Post details' +) +``` + +#### With GraphQL Options + +Logging with extended GraphQL output that includes: + +- 🏷 GraphQL Operation Name +- 🔭 GraphQL Query +- 📦 GraphQL Data + +![Screen Shot 2021-12-22 at 1 43 11 PM](https://user-images.githubusercontent.com/1051633/147141089-20a41441-1038-4fee-a599-12f78d83a31d.png) + +#### With Prisma Queries + +Logging with Prisma query statement output. 
+ +![Screen Shot 2021-12-22 at 1 44 20 PM](https://user-images.githubusercontent.com/1051633/147141082-7cfd417a-28bf-4020-8547-96c33972b7ce.png) + +#### GraphQL Logging + +Redwood-specific [GraphQL log data](graphql.md#logging) included by the the `useRedwoodLogger` envelop plug-in is supported: + +- Request Id +- User-Agent +- GraphQL Operation Name +- GraphQL Query +- GraphQL Data + +#### Production Logging + +By the way, when logging in production, you may want to: + +- send the logs as [ndjson](http://ndjson.org) to your host's log handler or application monitoring service to process, store and display. Therefore, you would not format your logs with `LogFormatter`. +- log only `warn` and `errors` to avoid chatty `info` or `debug` messages (that would be better suited for a staging or integration environment) + +### Nested Logging + +Since you can log metadata information alongside your message as seen in: + +```jsx +logger.debug({ item }, `Item ${item.name} detail`) +logger.warn(item, `Item ${item.id} is missing a name`) +logger.warn({ missing: { name: item.name } }, `Item ${item.id} is missing values`) +logger.error(error, `Failed to save item`) +``` + +There could be cases where a key in that metadata collides with a key needed by pino or your third-party transport. + +To prevent collisions and overwriting values, you can nest your metadata in `log` or `payload` (or some other attribute). + +```jsx +nestedKey: 'log', +``` + +Note: If you use `nestedKey` logging, you will have to manually set any `redact` options to include the `nestedKey` values as a prefix. + +For example, if your nestedKey is `'log`, then instead of redacting `email` you will have to redact `log.email`. + +### Destination aka Where to Log + +The `destination` option allows you to specify where to send the api-side log statements: to standard output, file, or transport stream. + +### Dev Server + +When in your development environment, logs will be output to the dev server's standard output. + +### Log to File + +If you are in your development environment (or another environment in which you have write access to the filesystem) you can set the `destination` to the location of your file. + +Note: logging to a file is not permitted if deployed to Netlify or Vercel. + +```jsx +/** + * Log to a File + */ +export const logger = createLogger({ + //options: {}, + destination: '/path/to/file/api.log', +}) +``` + +### Transport Streams + +Since each serverless function is ephemeral, its logging output is, too. Unless you monitor that function log just at the right time, you'll miss critical warnings, errors, or exceptions. + +It's recommended then to log to a "transport" stream when deployed to production so that logs are stored and searchable. + +Pino offers [several transports](https://github.com/pinojs/pino/blob/HEAD/docs/transports.md#known-transports) that can send your logs to a remote destination. A ["transport"](https://github.com/pinojs/pino/blob/HEAD/docs/transports.md) for pino is a supplementary tool which consumes pino logs. + +See below for examples of how to configure Logflare and Datadog. + +Note that not all [known pino transports](https://github.com/pinojs/pino/blob/HEAD/docs/transports.md#known-transports) can be used in a serverless environment. + +## Default Configuration Overview + +RedwoodJS provides an opinionated logger with sensible, practical defaults. 
These include: + +- Colorize and emojify output with a custom LogFormatter +- Ignore certain event attributes like hostname and pid for cleaner log statements +- Prefix the log output with log level +- Use a shortened log message that omits the server name +- Humanize time in GMT +- Set the default log level in dev or test to trace +- Set the default log level in prod to warn +- Note you may override the default log level via the LOG_LEVEL environment variable +- Redact the host and other keys via a set redactionsList + +## Configuration Examples + +Here are some examples of common configurations and overrides that demonstrate how you can control both how and where you log. + +### Override Log Level + +You can set the minimum [level](#log-level) to log via the `level` option. This is useful if you need to override the default production setting (just `warn` and `error`), in this case to `debug`. + +```jsx +/** + * Override minimum log level to debug + */ +export const logger = createLogger({ + options: { level: 'debug' }, +}) +``` + +### Customize a Redactions List + +While the logger provides a default redaction list, you can specify additional keys to redact by either appending them to the list or setting the `redact` option to a new array of keys. + +Please see [pino's redaction documentation](https://github.com/pinojs/pino/blob/master/docs/redaction.md) for other `redact` options, such as removing both keys and values and path matching. + +```jsx +/** + * Customize a redactions list to add `my_secret_key` + */ +import { redactionsList } from '@redwoodjs/api/logger' + +export const logger = createLogger({ + options: { redact: [...redactionsList, 'my_secret_key'] }, +}) +``` + +### Log to a Physical File + +If you are in your development environment (or another environment in which you have write access to the filesystem), you can set the `destination` to the location of your file. + +Note: logging to a file is not permitted if deployed to Netlify or Vercel. + +```jsx +/** + * Log to a File + */ +export const logger = createLogger({ + options: {}, + destination: '/path/to/file/api.log', +}) +``` + +### Customize your own Transport Stream Destination, e.g. with Honeybadger + +If `pino` doesn't have a transport package for your service, you can write one with the `Writable` class from the `stream` package. You can adapt this example to your own logging needs, but here we will use [Honeybadger.io](https://honeybadger.io). + +- Install the `stream` package into `api` + +```shell +yarn workspace api add stream +``` + +- Install the `honeybadger-io/js` package into `api`, or any other package that suits you + +```shell +yarn workspace api add @honeybadger-io/js +``` + +- Import both `stream` and `@honeybadger-io/js` into `api/src/lib/logger.ts` + +```jsx +import { createLogger } from '@redwoodjs/api/logger' +import { Writable } from 'stream' + +const Honeybadger = require('@honeybadger-io/js') + +Honeybadger.configure({ + apiKey: process.env.HONEYBADGER_API_KEY, +}) + +const HoneybadgerStream = () => { + const stream = new Writable({ + write(chunk: any, encoding: BufferEncoding, fnOnFlush: (error?: Error | null) => void) { + Honeybadger.notify(chunk.toString()) + fnOnFlush() + }, + }) + + return stream +} + +/** + * Creates a logger. Options define how to log. Destination defines where to log. + * If no destination, std out.
+ */ +export const logger = createLogger({ + options: { level: 'debug' }, + destination: HoneybadgerStream(), +}) +``` + +- For the sake of our example, make sure you have a `HONEYBADGER_API_KEY` variable in your environment. + +Documentation on the `Write` class can be found here: [https://nodejs.org/api/stream.html](https://nodejs.org/api/stream.html#stream_writable_write_chunk_encoding_callback) + +### Log to Datadog using a Transport Stream Destination + +To stream your logs to [Datadog](https://www.datadoghq.com/), you can + +- Install the [`pino-datadog`](https://www.npmjs.com/package/pino-datadog) package into `api` + +```bash +yarn workspace api add pino-datadog +``` + +- Import `pino-datadog` into `api/src/lib/logger.ts` +- Configure the `stream` with your API key and [settings](https://github.com/ovhemert/pino-datadog/blob/master/docs/API.md) +- Set the logger `destination` to the `stream` + +```jsx +/** + * Stream logs to Datadog + */ +// api/src/lib/logger.ts +import datadog from 'pino-datadog' + +/** + * Creates a synchronous pino-datadog stream + * + * @param {object} options - Datadog options including your account's API Key + * + * @typedef {DestinationStream} + */ +export const stream = datadog.createWriteStreamSync({ + apiKey: process.env.DATADOG_API_KEY, + ddsource: 'my-source-name', + ddtags: 'tag,not,it', + service: 'my-service-name', + size: 1, +}) + +/** + * Creates a logger with RedwoodLoggerOptions + * + * These extend and override default LoggerOptions, + * can define a destination like a file or other supported pino log transport stream, + * and sets whether or not to show the logger configuration settings (defaults to false) + * + * @param RedwoodLoggerOptions + * + * RedwoodLoggerOptions have + * @param {options} LoggerOptions - defines how to log, such as redaction and format + * @param {string | DestinationStream} destination - defines where to log, such as a transport stream or file + * @param {boolean} showConfig - whether to display logger configuration on initialization + */ +export const logger = createLogger({ + options: {}, + destination: stream, +}) +``` + +### Log to Logflare using a Transport Stream Destination + +- Install the [`pino-logflare`](https://www.npmjs.com/package/pino-logflare) package into `api` + +```bash +yarn workspace api add pino-logflare +``` + +- Import `pino-logflare` into `api/src/lib/logger.ts` +- Configure the `stream` with your [API key and sourceToken](https://github.com/Logflare/pino-logflare/blob/master/docs/API.md) +- Set the logger `destination` to the `stream` + +```jsx title="api/src/lib/logger.ts" +import { createWriteStream } from 'pino-logflare' + +/** + * Creates a pino-logflare stream + * + * @param {object} options - Logflare options including + * your account's API Key and source token id + * + * @typedef {DestinationStream} + */ +export const stream = createWriteStream({ + apiKey: process.env.LOGFLARE_API_KEY, + sourceToken: process.env.LOGFLARE_SOURCE_TOKEN, +}) + +export const logger = createLogger({ + options: {}, + destination: stream, +}) +``` + +### Log to logDNA using a Transport Stream Destination + +- Install the [pino-logdna](https://www.npmjs.com/package/pino-logdna) package into `api` + +```bash +yarn workspace api add pino-logdna +``` + +- Import `pino-logdna` into `api/src/lib/logger.ts` +- Configure the `stream` with your [ingestion key](https://github.com/Logflare/pino-logflare/blob/master/docs/API.md) +- Set the logger `destination` to the `stream` + +```jsx title="api/src/lib/logger.ts" 
+import pinoLogDna from 'pino-logdna' + +const stream = pinoLogDna({ + key: process.env.LOGDNA_INGESTION_KEY, + onError: console.error, +}) + +/** + * Creates a logger with RedwoodLoggerOptions + * + * These extend and override default LoggerOptions, + * can define a destination like a file or other supported pino log transport stream, + * and sets whether or not to show the logger configuration settings (defaults to false) + * + * @param RedwoodLoggerOptions + * + * RedwoodLoggerOptions have + * @param {options} LoggerOptions - defines how to log, such as redaction and format + * @param {string | DestinationStream} destination - defines where to log, such as a transport stream or file + * @param {boolean} showConfig - whether to display logger configuration on initialization + */ +export const logger = createLogger({ + options: {}, + destination: stream, +}) +``` + +### Log to Papertrail using a Transport Stream Destination + +- Install the [pino-papertrail](https://www.npmjs.com/package/pino-papertrail) package into `api` + +```bash +yarn workspace api add pino-papertrail +``` + +- Import `pino-papertrail` into `logger.ts` +- Configure the `stream` in your Papertrail `options` with your appname's [configuration settings](https://github.com/ovhemert/pino-papertrail/blob/master/docs/API.md#options) +- Set the logger `destination` to the `stream` + +```jsx +import papertrail from 'pino-papertrail' + +const stream = papertrail.createWriteStream({ + appname: 'my-app', + host: '*****.papertrailapp.com', + port: '*****', +}) + +/** + * Creates a logger with RedwoodLoggerOptions + * + * These extend and override default LoggerOptions, + * can define a destination like a file or other supported pino log transport stream, + * and sets whether or not to show the logger configuration settings (defaults to false) + * + * @param RedwoodLoggerOptions + * + * RedwoodLoggerOptions have + * @param {options} LoggerOptions - defines how to log, such as redaction and format + * @param {string | DestinationStream} destination - defines where to log, such as a transport stream or file + * @param {boolean} showConfig - whether to display logger configuration on initialization + */ +export const logger = createLogger({ + options: {}, + destination: stream, +}) +``` + +## Papertrail Options + +You can pass the [following properties](https://github.com/ovhemert/pino-papertrail/blob/master/docs/API.md) in an options object: + +| Property | Type | Description | +| ------------------------------------------------------- | ----------------- | --------------------------------------------------- | +| appname (default: pino) | string | Application name | +| host (default: localhost) | string | Papertrail destination address | +| port (default: 1234) | number | Papertrail destination port | +| connection (default: udp) | string | Papertrail connection method (tls/tcp/udp) | +| echo (default: true) | boolean | Echo messages to the console | +| message-only (default: false) | boolean | Only send msg property as message to papertrail | +| backoff-strategy (default: `new ExponentialStrategy()`) | [BackoffStrategy] | Retry backoff strategy for any tls/tcp socket error | + +[backoffstrategy]: https://github.com/MathieuTurcotte/node-backoff#interface-backoffstrategy + +### Prisma Logging + +Redwood declares an instance of the PrismaClient + +Prisma is configured to log at the: + +- info +- warn +- error + +levels via `emitLogLevels`. 
+ +One may also log _every_ query by adding the `query` level to + +```jsx +log: emitLogLevels(['info', 'warn', 'error', 'query']), +``` + +If you wish to remove `info` logging, then you can define a set of levels, such as `['warn', 'error']`. + +To configure Prisma logging, you first create the client and set the `log` options to emit the levels you wish to be logged via `emitLogLevels`. Second, you instruct the `logger` to handle the events emitted by the Prisma client in `handlePrismaLogging` setting the instance of the Prisma Client you've created in `db`, the `logger` instances, and then the same levels you've told the client to emit. + +Both `emitLogLevels` and `handlePrismaLogging` are `@redwoodjs/api/logger` package exports. + +```jsx +/* + * Instance of the Prisma Client + */ +export const db = new PrismaClient({ + log: emitLogLevels(['info', 'warn', 'error']), +}) + +handlePrismaLogging({ + db, + logger, + logLevels: ['info', 'warn', 'error'], +}) +``` + +See: The Prisma Client References documentation on [Logging](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#log). + +#### Slow Queries + +If `query` Prisma level logging is enabled and the `debug` level is enabled on the Logger then all query statements will be logged. + +Otherwise, any query exceeding a threshold duration will be logged on the `warn` level. + +The default threshold duration is 2 seconds. You can also pass `slowQueryThreshold` as an option to customize this duration when setting up Prisma logger. For example: + +```jsx +handlePrismaLogging({ + db, + logger, + logLevels: ['query', 'info', 'warn', 'error'], + slowQueryThreshold: 5_000, // in ms +}) +``` + +### Advanced Use + +There are situations when you may wish to add information to every log statement. + +This may be accomplished via [child loggers](https://github.com/pinojs/pino/blob/master/docs/child-loggers.md). + +#### GraphQL Service / Event Logger + +Examples to come. (PRs welcome.) + +#### Flushing the Log + +Flush the content of the buffer when an asynchronous destination: + +```jsx +logger.flush() +``` + +The use case is primarily for asynchronous logging, which may buffer log lines while others are being written. + +#### Child Loggers + +A child logger let's you add information to every log statement output. 
+ +See: [pino's Child Loggers documentation](https://github.com/pinojs/pino/blob/master/docs/child-loggers.md) + +For example: + +```jsx +import { db } from 'src/lib/db' +import { logger } from 'src/lib/logger' + +export const userExamples = ({}, { info }) => { + // Adds path to the log + const childLogger = logger.child({ path: info.fieldName }) + childLogger.trace('I am in find many user examples resolver') + return db.userExample.findMany() +} + +export const userExample = async ({ id }, { info }) => { + // Adds id and the path to the log + const childLogger = logger.child({ id, path: info.fieldName }) + childLogger.trace('I am in the find a user example by id resolver') + const result = await db.userExample.findUnique({ + where: { id }, + }) + + // Since this is the child logger, here id and path will be included as well + childLogger.debug({ ...result }, 'This is the detail for the user') + + return result +} +``` + +The Redwood logger uses a child logger to inject the Prisma Client version into every Prisma log statement: + +```jsx +logger.child({ + prisma: { clientVersion: db['_clientVersion'] }, +}) +``` diff --git a/docs/versioned_docs/version-7.0/mailer.md b/docs/versioned_docs/version-7.0/mailer.md new file mode 100644 index 000000000000..923077849f7f --- /dev/null +++ b/docs/versioned_docs/version-7.0/mailer.md @@ -0,0 +1,283 @@ +# Mailer + +RedwoodJS offers a convenient Mailer for sending emails to your users. It's not just about sending an email; delivery matters too. The way you deliver the feature requiring email is as significant as how you prepare the mail to be delivered by the infrastructure that sends emails over the internet. + +When designing the Mailer, it was crucial that mail could be: + +* sent by popular third-party services like [Resend](), [SendGrid](), [Postmark](), [Amazon SES](), and others. +* sent by [Nodemailer]() as a self-hosted OSS solution. +* use different providers depending on the use case. For instance, some transactional emails might be sent via Resend and some digest emails sent by SES. You should be able to choose the method for a specific email. +* send safely in both development and test environments in a "sandbox" without worrying that emails might accidentally leak. +* be sent as text and/or html and composed using templates by popular tools like [React Email]() or [MJML](), with support for more methods in the future. +* unit tested to set the proper to, from, cc, subject, body, and more. +* integrated with RedwoodJS Studio to help design and preview templates. + +The RedwoodJS Mailer does more than "just send an email". It is a complete end-to-end design, development, and testing package for emails. + +## Overview + +The RedwoodJS Mailer consists of [handlers](#handlers) and [renderers](#renderers), which carry out the core functionality of sending (handling) your emails and composing (rendering) your emails, respectively. This is combined with a few required files which define the necessary configuration. + +A high-level overview of the Mailer Flow is shown in the diagram below, and each case is covered in more detail below the diagram. +mailer-flow + +### Renderers + +A **renderer** transforms your React components into strings of text or HTML that can be sent as an email. 
+ +Mailer currently offers the following renderers: +* [@redwoodjs/mailer-renderer-react-email]() based on [React Email]() +* [@redwoodjs/mailer-renderer-mjml-react]() based on [MJML]() + +You can find community-maintained renderers by searching across npm, our forums, and other community spaces. + +:::important + +Email clients are notoriously inconsistent in how they render HTML into the visual email content. Consider using a robust react library to help you write components that produce attractive emails, rendered consistently across email clients. + +::: + +### Handlers + +A **handler** is responsible for taking your rendered content and passing it on to a service that can send your email to the intended recipients, e.g., Nodemailer or Amazon SES. + +Mailer currently offers the following handlers: +* [@redwoodjs/mailer-handler-in-memory](), a simple in-memory handler typically used for testing. +* [@redwoodjs/mailer-handler-nodemailer](), which uses [Nodemailer](). +* [@redwoodjs/mailer-handler-studio](), which sends emails to the RedwoodJS Studio using nodemailer internally. +* [@redwoodjs/mailer-handler-resend](), which uses [Resend](). + +You can find community-maintained handlers by searching across npm, our forums, and other community spaces. + +### Files & Directories + +The core file for the Mailer functions is `api/src/lib/mailer.ts`. This file contains configuration defining which handlers and renderers to use and when. It starts out looking like this: +```ts title=api/src/lib/mailer.ts +import { Mailer } from '@redwoodjs/mailer-core' +import { NodemailerMailHandler } from '@redwoodjs/mailer-handler-nodemailer' +import { ReactEmailRenderer } from '@redwoodjs/mailer-renderer-react-email' + +import { logger } from 'src/lib/logger' + +export const mailer = new Mailer({ + handling: { + handlers: { + // TODO: Update this handler config or switch it out for a different handler completely + nodemailer: new NodemailerMailHandler({ + transport: { + host: 'localhost', + port: 4319, + secure: false, + }, + }), + }, + default: 'nodemailer', + }, + + rendering: { + renderers: { + reactEmail: new ReactEmailRenderer(), + }, + default: 'reactEmail', + }, + + logger, +}) +``` + +In the above, you can see how handlers and renderers are defined. Handlers are defined in the `handling` object where the keys are any name you wish to give, and the values are instances of the handler you want to use. Similarly for renderers, which are defined in the `rendering` object. Each must have a `default` provided, specifying which option to use by default in production. + +Mailer also expects you to put your mail react components inside the `api/src/mail` directory. For example, if you had a welcome email, it should be found in `api/src/mail/Welcome/Welcome.tsx`. + +## Setup + +The Mailer is not set up by default when you create a new RedwoodJS app, but it is easy to do so. Simply run the following CLI command: + +```bash title="RedwoodJS CLI" +yarn rw setup mailer +``` + +This command sets up the necessary files and dependencies. You can find more information on this command at [this](https://redwoodjs.com/docs/cli-commands#setup-mailer) specific section of our docs. + +## Usage + +### Example + +The best way to understand using the Mailer is with an example. + +In the tutorial, we built out a blog site. Let's say we have added a contact us functionality and the contact us form takes a name, email, and message and stores it in the database. 
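+ +Such a component is just a React component that takes props and returns the markup to render into the email body. Here's a minimal sketch (the props and markup are placeholders; for anything non-trivial you'd likely compose it from React Email components): + +```tsx title=api/src/mail/Welcome/Welcome.tsx +interface WelcomeProps { + name: string +} + +// A bare-bones welcome template: plain elements the renderer turns into email HTML +export function Welcome({ name }: WelcomeProps) { + return ( + <div> + <h1>Welcome aboard, {name}!</h1> + <p>Thanks for signing up. Reply to this email if you have any questions.</p> + </div> + ) +} +```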
+ +For this example, suppose we want to also send an email to some internal inbox with this contact us submission. + +The service would be updated like so: + +```ts title=api/src/services/contacts.ts +import { mailer } from 'src/lib/mailer' +import { ContactUsEmail } from 'src/mail/Example/Example' + +// ... + +export const createContact: MutationResolvers['createContact'] = async ({ + input, +}) => { + const contact = await db.contact.create({ + data: input, + }) + + // Send email + await mailer.send( + ContactUsEmail({ + name: input.name, + email: input.email, + // Note the date is hardcoded here for the sake of test snapshot consistency + when: new Date(0).toLocaleString(), + }), + { + to: 'inbox@example.com', + subject: 'New Contact Us Submission', + replyTo: input.email, + from: 'contact-us@example.com', + } + ) + + return contact +} +``` + +In the code above, we do the following: + +- Import the Mailer and our mail template. +- Call the `mailer.send` function with: + - Our template, which we pass props into based on the user input. + - A set of send options to specify to, from, etc. + +In the example above, we specified a `replyTo` because that suited our business logic. However, we probably don't want to write `replyTo: 'no-reply@example.com'` in all our other emails where we might want that to be set. + +In that case, we can use the `defaults` property in our `api/src/lib/mailer.ts` config: + +```ts title=api/src/lib/mailer.ts +defaults: { + replyTo: 'no-reply@example.com', +}, +``` + +Now that we implemented our example, we might start to think about testing or how to try this out ourselves during development. + +The Mailer behaves slightly differently based on which environment you are running in. + +This helps improve your experience as you don't have to worry about sending real emails during testing or development. + +### Testing + +When your `NODE_ENV` is set to `test`, then the Mailer will start in test mode. In this mode, all mail will be sent using a test handler rather than the default production one or any specific one set when calling `send` or `sendWithoutRendering`. + +By default, when the Mailer is created, it will check if the `@redwoodjs/mailer-handler-in-memory` package is available. If it is, this will become the test handler; otherwise, the test handler will be a no-op that does nothing. The `yarn rw setup mailer` command adds this `@redwoodjs/mailer-handler-in-memory` package as a `devDependency` automatically for you. + +If you want control over this test mode behavior, you can include the following configuration in the `mailer.ts` file: + +```ts title=api/src/lib/mailer.ts +test: { + when: process.env.NODE_ENV === 'test', + handler: 'someOtherHandler', +} +``` + +The `when` property can either be a boolean or a function that returns a boolean. This decides if the Mailer starts in test mode when it is created. The `handler` property can specify a different handler to use in test mode. + +As an example of how this helps with testing, let's work off the example we created above. 
Let's now test our email functionality in the corresponding test file: + +```ts title=api/src/services/contacts/contacts.test.ts +describe('contacts', () => { + scenario('creates a contact', async () => { + const result = await createContact({ + input: { name: 'String', email: 'String', message: 'String' }, + }) + + expect(result.name).toEqual('String') + expect(result.email).toEqual('String') + expect(result.message).toEqual('String') + + // Mail + const testHandler = mailer.getTestHandler() as InMemoryMailHandler + expect(testHandler.inbox.length).toBe(1) + const sentMail = testHandler.inbox[0] + expect({ + ...sentMail, + htmlContent: undefined, + textContent: undefined, + }).toMatchInlineSnapshot(` + { + "attachments": [], + "bcc": [], + "cc": [], + "from": "contact-us@example.com", + "handler": "nodemailer", + "handlerOptions": undefined, + "headers": {}, + "htmlContent": undefined, + "renderer": "reactEmail", + "rendererOptions": {}, + "replyTo": "String", + "subject": "New Contact Us Submission", + "textContent": undefined, + "to": [ + "inbox@example.com", + ], + } + `) + expect(sentMail.htmlContent).toMatchSnapshot() + expect(sentMail.textContent).toMatchSnapshot() + }) +}) +``` + +Above we tested that our service did the following: + +- Sent one email. +- All the send options (such as to, from, what handler, etc.) match a set of expected values (the inline snapshot). +- The rendered text and HTML content match the expected value (the snapshots). + +### Development + +Similar to the test mode, the Mailer also has a development mode. This mode is selected automatically when the Mailer is created if `NODE_ENV` is **not** set to `production`. This mode behaves similarly to the test mode and by default will attempt to use the `@redwoodjs/mailer-handler-studio` package if it is available. + +You can control the development mode behavior with the following configuration in the `mailer.ts` file: + +```ts title=api/src/lib/mailer.ts +development: { + when: process.env.NODE_ENV !== 'production', + handler: 'someOtherHandler', +}, +``` + +:::tip + +The Mailer studio has some helpful features when it comes to using the Mailer during development. It can provide a mail inbox so that you can send mail to your local machine and see the results. It can also provide live previews of your rendered mail templates as a guide to what they will likely look like when sent to your end users. + +::: + +### Production + +If neither the test nor development mode conditions are met, the Mailer will start in production mode. In this mode, there is no rerouting of your mail to different handlers. Instead, your mail will go directly to your default handler unless you specifically state a different one in your send options. + +### Studio + +Redwood Studio is tightly integrated with the mailer. The goal is to provide you with not just the ability to send mail but also the development tools to make your experience easier and more enjoyable. + +#### Template Previews +mailer-template-preview + +You can have a preview of what your mail templates will look like. These will rerender live as you update your template code and you can even provide a JSON payload which will be used as the props to your template component. These previews are approximate but should easily get you 90% of the way there. + +#### Local Inbox +mailer-local-inbox + +When running in development mode, using the default `@redwoodjs/mailer-handler-studio` development handler, your mail will be sent to a local SMTP inbox running inside of Studio. 
This allows you to use your app and have full emails sent without worrying about setting up a local inbox yourself or using some other online temporary inbox service. + +## Need a Renderer or Handler? + +If the Mailer does not currenly provide a [handler](notion://www.notion.so/redwoodjs/133467eb46b744fd8ae60df2d493d7d0#handlers) or [renderer](notion://www.notion.so/redwoodjs/133467eb46b744fd8ae60df2d493d7d0#renderers) for the service or technology you wish to use, this doesn't prevent you from using the Mailer. Instead, you can create your own handler or renderer which you can then open source to the wider RedwoodJS community. + +To do this, read over the existing implementations for handlers [here](https://github.com/redwoodjs/redwood/tree/main/packages/mailer/handlers) and renderers [here](https://github.com/redwoodjs/redwood/tree/main/packages/mailer/renderers). You can also find the interfaces that a handler or mailer must satisfy [here](https://github.com/redwoodjs/redwood/tree/main/packages/mailer/core) in the `@redwoodjs/mailer-core` package. + +Be sure to check out the community forum for people working on similar work, to document your own creations, or to get help on anything. diff --git a/docs/versioned_docs/version-7.0/monitoring/sentry.md b/docs/versioned_docs/version-7.0/monitoring/sentry.md new file mode 100644 index 000000000000..700b28809337 --- /dev/null +++ b/docs/versioned_docs/version-7.0/monitoring/sentry.md @@ -0,0 +1,177 @@ +--- +description: Monitor errors, performance and more in your Redwood app using Sentry +--- + +# Sentry + +**Setup [Sentry](https://sentry.io/welcome/) error and performance monitoring across your Redwood application.** + +From your command line, run: +``` +yarn redwood setup monitoring sentry +``` + +This command installs and sets up [`@sentry/node`](https://docs.sentry.io/platforms/node/) and [`@sentry/react`](https://docs.sentry.io/platforms/javascript/guides/react/), enabling [Prisma](https://docs.sentry.io/platforms/node/performance/database/opt-in/#prisma-orm-integration) and [Browser](https://docs.sentry.io/platforms/javascript/performance/instrumentation/automatic-instrumentation/) tracing to capture 100% of events. The following sections detail how you may further integrate Sentry in your Redwood application. + +## Sentry Envelop Plugin + +The setup command will install and attempt to setup the [`@envelop/sentry`](https://the-guild.dev/graphql/envelop/plugins/use-sentry) plugin in your application's GraphQL handler. If there is a problem installing it, the following can be used to do so manually. + + + + +```js title="api/src/functions/graphql.js" +import { useSentry } from '@envelop/sentry' + +import { createGraphQLHandler } from '@redwoodjs/graphql-server' + +import directives from 'src/directives/**/*.{js,ts}' +import sdls from 'src/graphql/**/*.sdl.{js,ts}' +import services from 'src/services/**/*.{js,ts}' + +import 'src/lib/sentry' + +... + +export const handler = createGraphQLHandler({ + directives, + sdls, + services, + extraPlugins: [useSentry()], + ... +}) +``` + + + + + +```ts title="api/src/functions/graphql.ts" +import { useSentry } from '@envelop/sentry' + +import { createGraphQLHandler } from '@redwoodjs/graphql-server' + +import directives from 'src/directives/**/*.{js,ts}' +import sdls from 'src/graphql/**/*.sdl.{js,ts}' +import services from 'src/services/**/*.{js,ts}' + +import 'src/lib/sentry' + +... 
+ +export const handler = createGraphQLHandler({ + directives, + sdls, + services, + extraPlugins: [useSentry()], + ... +}) +``` + + + + +## Setting the current user + +You can associate error and performance events with a unique identity using [`Sentry.setUser`](https://docs.sentry.io/platforms/node/enriching-events/identify-user/). Below is an example of doing so on the API by setting the identity to the user returned by `getCurrentUser`. + + + + +```js title="api/src/lib/auth.js" +import Sentry from 'src/lib/sentry' + +export const getCurrentUser = async (...) => { + const user = await db.user.findUnique(...) + + Sentry.setUser(user) + + ... +} +``` + + + + +```ts title="api/src/lib/auth.ts" +import Sentry from 'src/lib/sentry' + +export const getCurrentUser = async (...) => { + const user = await db.user.findUnique(...) + + Sentry.setUser(user) + + ... +} +``` + + + + +Below we set the current user on the web-side from within a [layout](#generate-layout). Note that the `useEffect` dependency array may vary depending on where you place `Sentry.setUser` in your own application. + + + + +```jsx title="web/src/layouts/SentryLayout/SentryLayout.jsx" +import { useEffect } from 'react' + +import { useAuth } from 'src/lib/auth' +import Sentry from 'src/lib/sentry' + +const SentryLayout = ({ children }) => { + const { currentUser } = useAuth() + + useEffect(() => Sentry.setUser(currentUser), [currentUser]) + + return <>{children} +} + +export default SentryLayout + +``` + + + + +```tsx title="web/src/layouts/SentryLayout/SentryLayout.tsx" +import React, { useEffect } from 'react' + +import { useAuth } from 'src/lib/auth' +import Sentry from 'src/lib/sentry' + +interface Props { + children: React.ReactNode +} + +const SentryLayout = ({ children }: Props) => { + const { currentUser } = useAuth() + + useEffect(() => Sentry.setUser(currentUser), [currentUser]) + + return <>{children} +} + +export default SentryLayout + +``` + + + + + +## Capturing exceptions + +You can make use of Sentry to capture exceptions which occur while executing API [Functions](#generate-function). + +```ts title="api/src/functions/foo.{js,ts}" +import Sentry from 'src/lib/sentry' + +export const handler = async (event, context) => { + try { + ... + } catch (err) { + Sentry.captureException(err) + } +} +``` diff --git a/docs/versioned_docs/version-7.0/prerender.md b/docs/versioned_docs/version-7.0/prerender.md new file mode 100644 index 000000000000..2ac27195086f --- /dev/null +++ b/docs/versioned_docs/version-7.0/prerender.md @@ -0,0 +1,401 @@ +--- +description: Render pages ahead of time +--- + +# Prerender + +Prerendering is great for providing a faster experience for your end users. Your pages will be rendered at build-time, saving your user's browser from having to do that job. + +We thought a lot about what the developer experience should be for route-based prerendering. The result is one of the smallest APIs imaginable! + +:::info How's Prerendering different from SSR/SSG/SWR/ISSG/...? +As Danny said in his [Prerender demo](https://www.youtube.com/watch?v=iorKyMlASZc&t=2844s) at our Community Meetup, the thing all of these have in common is that they render your markup in a Node.js context to produce HTML. The difference is when (build or runtime) and how often. + +Redwood currently supports prerendering at _build_ time. So before you deploy your web side, Redwood will render your pages into HTML, and once the JavaScript has been loaded on the browser, the page becomes dynamic. 
+::: + + + +## Prerendering a Page + +Prerendering a page is as easy as it gets. Just add the `prerender` prop to the Route that you want to prerender: + +```jsx {3} title="Routes.js" + +``` + +Then run `yarn rw build` and enjoy the performance boost! + + + + +### Prerendering all pages in a Set + +Just add the `prerender` prop to the Set that wraps all Pages you want to prerender: + +```jsx {1} title="Routes.js" + + + + +``` + +### Not found page + +You can also prerender your not found page (a.k.a your 404 page). Just add—you guessed it—the `prerender` prop: + +```diff +- ++ +``` + +This will prerender your NotFoundPage to `404.html` in your dist folder. Note that there's no need to specify a path. + +## Private Routes + +For Private Routes, Redwood prerenders your Private Routes' `whileLoadingAuth` prop: + +```jsx + + // Loading is shown while we're checking to see if the user's logged in + } prerender/> + +``` + +### Rendering skeletons while authenticating +Sometimes you want to render the shell of the page, while you wait for your authentication checks to happen. This can make the experience feel a lot snappier to the user, since they don't wait on a blank screen while their credentials are checked. + +To do this, make use of the `whileLoadingAuth` prop on `` in your Routes file. For example, if we have a dashboard that you need to be logged in to access: + +```js ./web/src/Routes.{tsx,js} +// This renders the layout with skeleton loaders in the content area +// highlight-next-line +const DashboardLoader = () => + + +const Routes = () => { + return ( + + + + + {/* ... */} +``` + +## Dynamic routes & Route Hooks + + + +Let's say you have a route like this + +```jsx + +``` + +To be able to prerender this route you need to let Redwood know what `id`s to use. Why? Because when we are prerendering your pages - at build time - we don't know the full URL i.e. `site.com/blog-post/1` vs `site.com/blog-post/3`. It's up to you to decide whether you want to prerender _all_ of the ids, or if there are too many to do that, if you want to only prerender the most popular or most likely ones. + +You do this by creating a `BlogPostPage.routeHooks.js` file next to the page file itself (so next to `BlogPostPage.js` in this case). It should export a function called `routeParameters` that returns an array of objects that specify the route parameters that should be used for prerendering. + +So for example, for the route `/blogPost/{Id:Int}` - you would return `[ {id: 55}, {id: 77} ]` which would tell Redwood to prerender `/blogPost/55` and `/blogPost/77` + +A single Page component can be used for different routes too! Metadata about the current route will be passed as an argument to `routeParameters` so you can return different route parameters depending on what route it is, if you need to. An example will hopefully make all this clearer. + +For the example route above, all you need is this: + +```js title="BlogPostPage.routeHooks.js" +export function routeParameters() { + return [{ id: 1 }, { id: 2 }, { id: 3 }] +} +``` + +Or, if you wanted to get fancy + +```js title="BlogPostPage.routeHooks.js" +export function routeParameters(route) { + + // If we are reusing the BlogPostPage in multiple routes, e.g. 
/odd/{id} and + // /blogPost/{id} we can choose what parameters to pass to each route during + // prerendering + // highlight-next-line + if (route.name === 'odd') { + return [{ id: 1 }, { id: 3 }, { id: 5 }] + } else { + return [{ id: 2 }, { id: 4 }, { id: 6 }] + } +} +``` + +With the config above three separate pages will be written: `web/dist/blog-post/1.html`, `web/dist/blog-post/2.html`, `web/dist/blog-post/3.html`. A word of warning - if it's just a few pages like this, it's no problem - but this can easily and quickly explode to thousands of pages, which could slow down your builds and deployments significantly (and make them costly, depending on how you're billed). + +In these routeHooks scripts you have full access to your database using prisma and all your services, should you need it. You use `import { db } from '$api/src/lib/db'` to get access to the `db` object. + +```js title="BlogPostPage.routeHooks.js" +import { db } from '$api/src/lib/db' + +export async function routeParameters() { + return (await db.post.findMany({ take: 7 })).map((post) => ({ id: post.id })) +} +``` + +Take note of the special syntax for the import, with a dollar-sign in front of api. This lets our tooling (typescript and babel) know that you want to break out of the web side the page is in to access code on the api side. This only works in the routeHook scripts (and scripts in the root /scripts directory). + +--- + +## Prerender Utils + +Sometimes you need more fine-grained control over whether something gets prerendered. This may be because the component or library you're using needs access to browser APIs like `window` or `localStorage`. Redwood has three utils to help you handle these situations: + +- `` +- `useIsBrowser` +- `isBrowser` + +:::tip Heads-up! +If you're prerendering a page that uses a third-party library, make sure it's "universal". If it's not, try calling the library after doing a browser check using one of the utils above. + +Look for these key words when choosing a library: _universal module, SSR compatible, server compatible_—all these indicate that the library also works in Node.js. +::: + +### `` component + +This higher-order component is great for JSX: + +```jsx +import { BrowserOnly } from '@redwoodjs/prerender/browserUtils' + +const MyFancyComponent = () => { +

+  return (
+    <div>
+      <h2>👋🏾 I render on both the server and the browser</h2>
+
+      <BrowserOnly>
+        <h2>🙋‍♀️ I only render on the browser</h2>
+      </BrowserOnly>
+    </div>
+  )
    +} +``` + +### `useIsBrowser` hook + +If you prefer hooks, you can use the `useIsBrowser` hook: + +```jsx +import { useIsBrowser } from '@redwoodjs/prerender/browserUtils' + +const MySpecialComponent = () => { + const browser = useIsBrowser() + + return ( +
+    <div>
+      <h1>Render info:</h1>
+
+      {browser ? <h2>Browser</h2> : <h2>Prerendered</h2>}
+    </div>
    + ) +} +``` + +### `isBrowser` boolean + +If you need to guard against prerendering outside React, you can use the `isBrowser` boolean. This is especially handy when running initializing code that only works in the browser: + +```jsx +import { isBrowser } from '@redwoodjs/prerender/browserUtils' + +if (isBrowser) { + netlifyIdentity.init() +} +``` + +### Debugging + +If you just want to debug your app, or check for possible prerendering errors, after you've built it, you can run this command: + +```bash +yarn rw prerender --dry-run +``` + +We're actively looking for feedback! Do let us know if: everything built ok? you encountered specific libraries that you were using that didn’t work? + +--- + +## Images and Assets + + + +Images and assets continue to work the way they used to. For more, see [this doc](assets-and-files.md). + +Note that there's a subtlety in how SVGs are handled. Importing an SVG and using it in a component works great: + +```jsx {1} +import logo from './my-logo.svg' + +function Header() { + return +} +``` + +But re-exporting the SVG as a component requires a small change: + +```jsx +// ❌ due to how Redwood handles SVGs, this syntax isn't supported. +import Logo from './Logo.svg' +export default Logo +``` + +```jsx +// ✅ use this instead. +import Logo from './Logo.svg' + +const LogoComponent = () => + +export default LogoComponent +``` + +--- +## Cell prerendering +As of v3.x, Redwood supports prerendering your Cells with the data you were querying. There's no special config to do here, but a couple of things to note: + +#### 1. Prerendering always happens as an unauthenticated user + +Because prerendering happens at _build_ time, before any authentication is set, all your queries on a Route marked for prerender will be made as a public user + +#### 2. We use your graphql handler to make queries during prerendering + +When prerendering we look for your graphql function defined in `./api/src/functions/graphql.{ts,js}` and use it to run queries against it. + + +### Common Warnings & Errors + +#### Could not load your GraphQL handler - the Loading fallback + +During builds if you encounter this warning +```shell + ⚠️ Could not load your GraphQL handler. + Your Cells have been prerendered in the "Loading" state. +``` + +It could mean one of two things: + +a) We couldn't locate the GraphQL handler at the usual path + +or + +b) There was an error when trying to import your GraphQL handler - maybe due to missing dependencies or an error in the code + + + +If you've moved this GraphQL function, or we encounter an error executing it, it won't break your builds. All your Cells will be prerendered in their `Loading` state, and will update once the JavaScript loads on the browser. This is effectively skipping prerendering your Cells, but they'll still work! + + +#### Cannot prerender the query \{queryName\} as it requires auth. +This error happens during builds when you have a Cell on a page you're prerendering that makes a query marked with `@requireAuth` in your SDL. + +During prerender you are not logged in ([see point 1](#1-prerendering-always-happens-as-an-unauthenticated-user)), so you'll have to conditionally render the Cell - for example: + +```js +import { useAuth } from '@redwoodjs/auth' + +const HomePage = () => { + // highlight-next-line + const { isAuthenticated } = useAuth + + return ( + <> + // highlight-next-line + { isAuthenticated ? 
: } + +``` + +--- +## Optimization Tips + + +### Dynamically loading large libraries + +If you dynamically load third-party libraries that aren't part of your JS bundle, using these prerendering utils can help you avoid loading them at build time: + +```jsx +import { useIsBrowser } from '@redwoodjs/prerender/browserUtils' + +const ComponentUsingAnExternalLibrary = () => { + const browser = useIsBrowser() + + // if `browser` evaluates to false, this won't be included + if (browser) { + loadMyLargeExternalLibrary() + } + + return ( + // ... + ) +``` + +### Configuring redirects + +Depending on what pages you're prerendering, you may want to change your redirect settings. Keep in mind your redirect settings will vary a lot based on what routes you are prerendering, and the settings of your deployment provider. + + +Using Netlify as an example: + +
+<details>
+<summary>If you prerender your `notFoundPage`, and all your other routes</summary>
+
+You can remove the default redirect to index in your `netlify.toml`. This means the browser will accurately receive 404 statuses when navigating to a route that doesn't exist:
+
+```diff
+[[redirects]]
+- from = "/*"
+- to = "/index.html"
+- status = 200
+```
+
+This makes your app behave much more like a traditional website, where all the possible routes are defined up front. But take care to make sure you are prerendering all your pages, otherwise you will receive 404s on pages that do exist, but that Netlify hasn't been told about.
+
+</details>
+
+<details>
+<summary>If you don't prerender your 404s, but prerender all your other pages</summary>
+
+You can add a 404 redirect if you want:
+
+```diff
+[[redirects]]
+ from = "/*"
+ to = "/index.html"
+- status = 200
++ status = 404
+```
+
+This makes your app behave much more like a traditional website, where all the possible routes are defined up front. But take care to make sure you are prerendering all your pages, otherwise you will receive 404s on pages that do exist, but that Netlify hasn't been told about.
+
+</details>
    + + + +### Flash after page load + +You might notice a flash after page load. Prerendering pages still has various benefits (such as SEO), but may seem jarring to users if there's a flash. + +A quick workaround for this is to make sure whatever page you're seeing the flash on isn't dynamically loaded i.e. prevent code splitting. You can do this by explicitly importing the page in `Routes.js`: + +```jsx +import { Router, Route } from '@redwoodjs/router' +// We don't want HomePage to be dynamically loaded +// highlight-next-line +import HomePage from 'src/pages/HomePage' + +const Routes = () => { + return ( + + + + + + ) +} + +export default Routes +``` diff --git a/docs/versioned_docs/version-7.0/project-configuration-dev-test-build.mdx b/docs/versioned_docs/version-7.0/project-configuration-dev-test-build.mdx new file mode 100644 index 000000000000..37df966dafeb --- /dev/null +++ b/docs/versioned_docs/version-7.0/project-configuration-dev-test-build.mdx @@ -0,0 +1,238 @@ +--- +title: Project Configuration +description: Advanced project configuration +--- + +import ReactPlayer from 'react-player' + +# Project Configuration: Dev, Test, Build + +## Babel + +Out of the box Redwood configures [Babel](https://babeljs.io/) so that you can write modern JavaScript and TypeScript without needing to worry about transpilation at all. +GraphQL tags, JSX, SVG imports—all of it's handled for you. + +For those well-versed in Babel config, you can find Redwood's in [@redwoodjs/internal](https://github.com/redwoodjs/redwood/tree/main/packages/internal/src/build/babel). + +### Configuring Babel + +For most projects, you won't need to configure Babel at all, but if you need to you can configure each side (web, api) individually using side-specific `babel.config.js` files. + +> **Heads up** +> +> `.babelrc{.js}` files are ignored. +> You have to put your custom config in the appropriate side's `babel.config.js`: `web/babel.config.js` for web and `api/babel.config.js` for api. + +Let's go over an example. + +#### Example: Adding Emotion + +Let's say we want to add the styling library [emotion](https://emotion.sh), which requires adding a Babel plugin. + +1. Create a `babel.config.js` file in `web`: +```shell +touch web/babel.config.js +``` +
    + +2. Add the `@emotion/babel-plugin` as a dependency: +```shell +yarn workspace web add --dev @emotion/babel-plugin +``` +
    + +3. Add the plugin to `web/babel.config.js`: +```jsx title="web/babel.config.js" +module.exports = { + plugins: ["@emotion"] // 👈 add the emotion plugin +} + +// ℹ️ Notice how we don't need the `extends` property +``` + +That's it! +Now your custom web-side Babel config will be merged with Redwood's. + +## Jest + +Redwood uses [Jest](https://jestjs.io/) for testing. +Let's take a peek at how it's all configured. + +At the root of your project is `jest.config.js`. +It should look like this: + +```jsx title="jest.config.js" +module.exports = { + rootDir: '.', + projects: ['/{*,!(node_modules)/**/}/jest.config.js'], +} +``` + +This just tells Jest that the actual config files sit in each side, allowing Jest to pick up the individual settings for each. +`rootDir` also makes sure that if you're running Jest with the `--collectCoverage` flag, it'll produce the report in the root directory. + +#### Web Jest Config + +The web side's configuration sits in `./web/jest.config.js` + +```jsx +const config = { + rootDir: '../', + preset: '@redwoodjs/testing/config/jest/web', + // ☝️ load the built-in Redwood Jest configuration +} + +module.exports = config +``` + +> You can always see Redwood's latest configuration templates in the [create-redwood-app package](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/templates/ts/web/jest.config.js). + +The preset includes all the setup required to test everything that's going on in web: rendering React components and transforming JSX, automatically mocking Cells, transpiling with Babel, mocking the Router and the GraphQL client—the list goes on! +You can find all the details in the [source](https://github.com/redwoodjs/redwood/blob/main/packages/testing/config/jest/web/jest-preset.js). + +#### Api Side Config + +The api side is configured similarly, with the configuration sitting in `./api/jest.config.js`. +But the api preset is slightly different in that: + +- it's configured to run tests serially (because Scenarios seed your test database) +- it has setup code to make sure your database is 1) seeded before running tests 2) reset between Scenarios + +You can find all the details in the [source](https://github.com/redwoodjs/redwood/blob/main/packages/testing/config/jest/api/jest-preset.js). + +## GraphQL Codegen + +You can customize the types that Redwood generates from your project too! This is documented in a bit more detail in the [Generated Types](typescript/generated-types#customising-codegen-config) doc. + +## Debug configurations + +### Dev Server +The `yarn rw dev` command is configured by default to open a browser and a debugger on the port `18911` and your redwood app ships with several default configurations to debug with VSCode. + +#### Customizing the configuration +**a) Using the redwood.toml** + +Add/change the `debugPort` or `open` under your api settings + +```toml title="redwood.toml" +[web] + # . +[api] + # . + // highlight-next-line + debugPort = 18911 # change me! +[browser] + // highlight-next-line + open = true # change me! +``` + +**b) Pass a flag to `rw dev` command** + +You can also pass a flag when you launch your dev servers, for example: + +```bash +yarn rw dev --debugPort 75028 +``` +The flag passed in the CLI will always take precedence over your setting in the `redwood.toml` + +Just remember to also change the port you are attaching to in your `./vscode/launch.json` + +### API and Web Debuggers +Simply run your dev server, then attach the debugger from the "run and debug" panel. 
Quick demo below: + + + +### Compound Debugger +The compound configuration is a combination of the dev, api and web configurations. +It allows you to start all debugging configurations at once, facilitating simultaneous debugging of server and client-side code. + +
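+If you don't see a compound configuration in your `.vscode/launch.json`, here is a rough sketch of what one looks like. The configuration names below are placeholders rather than the exact names from the Redwood template (linked in the tip below), so match them to the configurations you already have:
+
+```jsonc
+{
+  "version": "0.2.0",
+  "configurations": [
+    // ... your existing dev, api and web debug configurations ...
+  ],
+  "compounds": [
+    {
+      // Placeholder name and entries: use the names of your own configurations
+      "name": "Debug All",
+      "configurations": ["Dev Server", "Debug Api", "Debug Web"]
+    }
+  ]
+}
+```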
    + +> **ℹ️ Tip: Can't see the debug configurations?** In VSCode +> +> You can grab the latest launch.json from the Redwood template [here](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/templates/ts/.vscode/launch.json). Copy the contents into your project's `.vscode/launch.json` + +## Ignoring the `.yarn` folder + +The `.yarn` folder contains the most recent Yarn executable that Redwood supports +which is the [recommended way](https://github.com/yarnpkg/yarn/issues/7741) +to ensure things run smoothly for everyone. From VSCode's perspective, this of course +is just another folder containing code, so it will + +1. include its contents in project-wide, full-text searches +2. display it in the file browser +3. watch its contents for changes + +… which, depending on your personal preference, is something you may not need or want. + +Fortunately, all these aspects are configurable via VSCode's `settings.json`. You have the +choice of making these changes to your local Redwood project's configuration +found in `.vscode/settings.json` or globally (so they apply to other projects as +well). For global changes, hit F1 or Ctrl+Shift+P +(that's +Shift+P if you're on Mac) +and search for "Preferences: Open User Settings (JSON)". + +Note that the local workspace configuration always overrules your user settings. +The VSCode website [provides an extensive explanation](https://code.visualstudio.com/docs/getstarted/settings#_settings-precedence) +on how its config inheritance works. It also has a complete reference of +[all available settings and their defaults](https://code.visualstudio.com/docs/getstarted/settings#_default-settings). + +### Excluding a folder from search results only + +Adding the following would exclude any `.yarn` folder encountered anywhere in +the project (that's what the +`**` [glob pattern](https://code.visualstudio.com/docs/editor/codebasics#_advanced-search-options) +does) from search results: + +```json + "search.exclude": { + "**/.yarn": true + } +``` + +### Excluding a folder from the file browser and searching + +```json + "files.exclude": { + "**/.yarn": true + } +``` + +This setting also excludes all matching folders and files from search results, +so there's no point in adding a `search.exclude` setting separately. + +Don't worry: this setting won't influence change detection in your "Source Control" +tab—that would be managed via `.gitignore`. + +### Excluding a folder from watching + +```json + "files.watcherExclude": { + "**/.yarn": true + } +``` + +This setting works independently of the ones above and so it needs to be added +separately. It's important to note that files or folders matched by this +setting will no longer immediately appear (or disappear): +- from existing search results (but as soon as you search again or change the search term, they'll be discovered) +- in your "Source Control" tab, unless you hit the "Refresh" button + +Admittedly, the `.yarn` folder won't change that often, so this may not be +the best example. But we thought we'd share this technique with you +so that you'd know how to apply it to any folders that you know change very often, +and how to tell VSCode not to bother wasting any CPU cycles on them. + +## Trailing whitespace + +If you're using VS Code, or another editor that supports +[EditorConfig](https://editorconfig.org), trailing whitespace will be trimmed +in source files, but preserved in html, markdown and mjml files when saving. 
+ +This behavior is controlled by `.vscode/settings` or `.editorconfig` depending +on your editor. + +In JavaScript and TypeScript files trailing whitespace has no significance, +but for html, markdown and mjml it does. That's why the behavior is different +for those files. If you don't like the default behavior Redwood has configured +for you, you're free to change the settings in those two files. diff --git a/docs/versioned_docs/version-7.0/quick-start.md b/docs/versioned_docs/version-7.0/quick-start.md new file mode 100644 index 000000000000..e9c078e1958e --- /dev/null +++ b/docs/versioned_docs/version-7.0/quick-start.md @@ -0,0 +1,149 @@ +--- +description: Redwood quick start +--- + +# Quick Start + +:::info Prerequisites + +- Redwood requires [Node.js](https://nodejs.org/en/) (=20.x) and [Yarn](https://yarnpkg.com/) (>=1.22.21) +- Are you on Windows? For best results, follow our [Windows development setup](how-to/windows-development-setup.md) guide + +::: + +Create a Redwood project with `yarn create redwood-app`: + +``` +yarn create redwood-app my-redwood-project +``` + +:::tip Prefer TypeScript? + +Redwood comes with full TypeScript support from the get-go: + +``` +yarn create redwood-app my-redwood-project --typescript +``` + +::: + +Then change into that directory, yarn install, and start the development server: + +``` +cd my-redwood-project +yarn install +yarn redwood dev +``` + +Your browser should automatically open to [http://localhost:8910](http://localhost:8910) where you'll see the Welcome Page, which links out to many great resources: + +Redwood Welcome Page + +Redwood Welcome Page + +Congratulations on running your first Redwood CLI command! +From dev to deploy, the CLI is with you the whole way. +And there's quite a few commands at your disposal: +``` +yarn redwood --help +``` +For all the details, see the [CLI reference](cli-commands.md). + +## Prisma and the database + +Redwood wouldn't be a full-stack framework without a database. It all starts with the schema. Open the `schema.prisma` file in `api/db` and replace the `UserExample` model with the following `Post` model: + +```js title="api/db/schema.prisma" +model Post { + id Int @id @default(autoincrement()) + title String + body String + createdAt DateTime @default(now()) +} +``` + +Redwood uses [Prisma](https://www.prisma.io/), a next-gen Node.js and TypeScript ORM, to talk to the database. Prisma's schema offers a declarative way of defining your app's data models. And Prisma [Migrate](https://www.prisma.io/migrate) uses that schema to make database migrations hassle-free: + +``` +yarn rw prisma migrate dev + +# ... + +? Enter a name for the new migration: › create posts +``` + +:::tip + +`rw` is short for `redwood` + +::: + +You'll be prompted for the name of your migration. `create posts` will do. + +Now let's generate everything we need to perform all the CRUD (Create, Retrieve, Update, Delete) actions on our `Post` model: + +``` +yarn redwood generate scaffold post +``` + +Navigate to [http://localhost:8910/posts/new](http://localhost:8910/posts/new), fill in the title and body, and click "Save": + +Create a new post + +Did we just create a post in the database? Yup! With `yarn rw generate scaffold `, Redwood created all the pages, components, and services necessary to perform all CRUD actions on our posts table. + +## Frontend first with Storybook + +Don't know what your data models look like? +That's more than ok—Redwood integrates Storybook so that you can work on design without worrying about data. 
+Mockup, build, and verify your React components, even in complete isolation from the backend: + +``` +yarn rw storybook +``` + +Seeing "Couldn't find any stories"? +That's because you need a `*.stories.{tsx,jsx}` file. +The Redwood CLI makes getting one easy enough—try generating a [Cell](./cells), Redwood's data-fetching abstraction: + +``` +yarn rw generate cell examplePosts +``` + +The Storybook server should hot reload and now you'll have four stories to work with. +They'll probably look a little bland since there's no styling. +See if the Redwood CLI's `setup ui` command has your favorite styling library: + +``` +yarn rw setup ui --help +``` + +## Testing with Jest + +It'd be hard to scale from side project to startup without a few tests. +Redwood fully integrates Jest with both the front- and back-ends, and makes it easy to keep your whole app covered by generating test files with all your components and services: + +``` +yarn rw test +``` + +To make the integration even more seamless, Redwood augments Jest with database [scenarios](testing.md#scenarios) and [GraphQL mocking](testing.md#mocking-graphql-calls). + +## Ship it + +Redwood is designed for both serverless deploy targets like Netlify and Vercel and serverful deploy targets like Render and AWS: + +``` +yarn rw setup deploy --help +``` + +Don't go live without auth! +Lock down your app with Redwood's built-in, database-backed authentication system ([dbAuth](authentication.md#self-hosted-auth-installation-and-setup)), or integrate with nearly a dozen third-party auth providers: + +``` +yarn rw setup auth --help +``` + +## Next Steps + +The best way to learn Redwood is by going through the comprehensive [tutorial](tutorial/foreword.md) and joining the community (via the [Discourse forum](https://community.redwoodjs.com) or the [Discord server](https://discord.gg/redwoodjs)). diff --git a/docs/versioned_docs/version-7.0/realtime.md b/docs/versioned_docs/version-7.0/realtime.md new file mode 100644 index 000000000000..3a247671160e --- /dev/null +++ b/docs/versioned_docs/version-7.0/realtime.md @@ -0,0 +1,675 @@ +# Realtime + +One of the most often-asked questions of Redwood before and after the launch of V1 was, “When will Redwood support a realtime solution?” + +The answer is: **now**. + +## What is Realtime? + +Redwood's initial realtime solution leverages GraphQL and relies on a serverful deployment to maintain a long-running connection between the client and server. + +:::info + +This means that your cannot use Realtime when deploying to Netlify or Vercel. + +See one of Redwood's many [other Deploy providers](./deploy/introduction.md), and the [Docker setup](./docker.md) for good measure. + +::: + +Redwood's GraphQL server uses the [GraphQL over Server-Sent Events](https://github.com/enisdenjo/graphql-sse/blob/master/PROTOCOL.md#distinct-connections-mode) spec's "distinct connections mode" for subscriptions. + +Advantages of SSE over WebSockets include: + +* Transported over simple HTTP instead of a custom protocol +* Built in support for re-connection and event-id +* Simpler protocol +* No trouble with corporate firewalls doing packet inspection + +### Subscriptions and Live Queries + +In GraphQL, there are two options for real-time updates: **live queries** and **subscriptions**. + +Subscriptions are part of the GraphQL specification, whereas live queries are not. + +There are times where subscriptions are well-suited for a realtime problem and in some cases live queries may be a better fit. 
Later we’ll explore the pros and cons of each approach and how best to decide which to use and when. + +### Defer and Stream + +[Defer and stream](https://the-guild.dev/graphql/yoga-server/docs/features/defer-stream) are directives that allow you to improve latency for clients by sending the most important data as soon as it's ready. + +As applications grow, the GraphQL operation documents can get bigger. The server will only send the response back once all the data requested in the query is ready. But not all requested data is of equal importance, and the client may not need all of the data at once. + +#### Using Defer + +The `@defer` directive allows you to postpone the delivery of one or more (slow) fields grouped in an inlined or spread fragment. + +#### Using Stream + +The `@stream` directive allows you to stream the individual items of a field of the list type as the items are available. + +:::info +The `@stream` directive is currently **not** supported by Apollo GraphQL client. +::: + +## Features + +Realtime handles the hard parts of a GraphQL realtime implementation by automatically: + +- allowing GraphQL Subscription operations to be handled +- merging in your subscriptions types and mapping their handler functions (subscribe and resolve) to your GraphQL schema letting you keep your subscription logic organized and apart from services (your subscription may use a service to respond to an event) +- authenticating subscription requests using the same `@requireAuth` directives already protecting other queries and mutations (or you can implement your own validator directive) +- adding in the `@live` query directive to your GraphQL schema and setting up the `useLiveQuery` envelop plugin to handle requests, invalidation, and managing the storage mechanism needed +- creating and configuring in-memory and persisted Redis stores used by the PubSub transport for subscriptions and Live Queries (and letting you switch between them in development and production) +- placing the pubSub transport and stores into the GraphQL context so you can use them in services, subscription resolvers, or elsewhere (like a webhook, function, or job) to publish an event or invalidate data +- typing your subscription channel event payloads +- support `@defer` and `@stream` directives + +It provides a first-class developer experience for real-time updates with GraphQL so you can easily + +- respond to an event (e.g. NewPost, NewUserNotification) +- respond to a data change (e.g. Post 123's title updated) + +and have the latest data reflected in your app. + +Lastly, the Redwood CLI has commands to generate a boilerplate implementation and sample code needed to create your custom subscriptions and Live Queries. + +Regardless of the implementation chosen, **a stateful server and store are needed** to track changes, invalidation, and who wants to be informed about changes. + +### What can I build with Realtime? + +- Application alerts and messages +- User notifications +- Live charts +- Location updates +- Auction bid updates +- Messaging +- OpenAI streaming responses + +## Redwood Realtime Setup + +To setup realtime in an existing Redwood project, run the following commands: + +* `yarn rw setup server-file` +* `yarn rw setup realtime` + +You'll get: + +* `api/server.ts` where you can configure your Fastify server +* `api/lib/realtime.ts` where you consume your subscriptions and configure realtime with an in-memory or Redis store +* Usage examples for live queries, subscriptions, defer, and stream. 
You'll get sdl, services/subscriptions for each +* The [`auction` live query](#auction-live-query-example) example +* The [`countdown timer` subscription](#countdown-timer-example) example +* The [`chat` subscription](#chatnew-message-example) examples +* The [`alphabet` stream](#alphabet-stream-example) example +* The [`slow and fast` field defer](#slow-and-fast-field-defer-example) example + +:::note +There is no UI set up for these examples. You can find information on how to try them out using the GraphiQL playground. +::: + +Just add the realtime configuration to your GraphQL handler in `api/src/functions/graphql.ts` and you're good to go: + +```diff title="api/src/functions/graphql.ts" ++ import { realtime } from 'src/lib/realtime' + + export const handler = createGraphQLHandler({ + // ... ++ realtime, + }) +``` + +### Realtime Configuration + +By default, Redwood's realtime configures an in-memory store for the Pub Sub client used with subscriptions and live query invalidation. + +Realtime supports in-memory and Redis stores: + +- In-memory stores are useful for development and testing. +- Redis stores are useful for production. + +To enable defer and streaming, set `enableDeferStream` to true. + +Configure a Redis store and defer and stream in: + +```ts title="api/lib/realtime.ts" +import { RedwoodRealtimeOptions } from '@redwoodjs/realtime' + +import subscriptions from 'src/subscriptions/**/*.{js,ts}' + +// if using a Redis store +// import { Redis } from 'ioredis' +// const publishClient = new Redis() +// const subscribeClient = new Redis() + +/** + * Configure RedwoodJS Realtime + * + * See https://redwoodjs.com/docs/realtime + * + * Realtime supports Live Queries and Subscriptions over GraphQL SSE. + * + * Live Queries are GraphQL queries that are automatically re-run when the data they depend on changes. + * + * Subscriptions are GraphQL queries that are run when a client subscribes to a channel. + * + * Redwood Realtime + * - uses a publish/subscribe model to broadcast data to clients. + * - uses a store to persist Live Query and Subscription data. + * + * Redwood Realtime supports in-memory and Redis stores: + * - In-memory stores are useful for development and testing. + * - Redis stores are useful for production. + */ +export const realtime: RedwoodRealtimeOptions = { + subscriptions: { + subscriptions, + store: 'in-memory', + // if using a Redis store + // store: { redis: { publishClient, subscribeClient } }, + }, + liveQueries: { + store: 'in-memory', + // if using a Redis store + // store: { redis: { publishClient, subscribeClient } }, + }, + // To enable defer and streaming, set to true. + // enableDeferStream: true, +} +``` + +#### PubSub and LiveQueryStore + +By setting up realtime, the GraphQL server adds two helpers on the context: + +* pubSub +* liveQueryStory + +With `context.pubSub` you can subscribe to and publish messages via `context.pubSub.publish('the-topic', id, id2)`. + +With `context.liveQueryStore.` you can `context.liveQueryStore.invalidate(key)` where your key may be a reference or schema coordinate: + +##### Reference +Where the query is: `auction(id: ID!): Auction @requireAuth`: + +* `"Auction:123"` + +##### Schema Coordinate +When the query is: `auctions: [Auction!]! @requireAuth`: + +* `"Query.auctions"` + +## Subscriptions + +Redwood has a first-class developer experience for GraphQL subscriptions. + +#### Subscribe to Events + +- Granular information on what data changed +- Why has the data changed? 
+- Spec compliant + +### Chat/New Message Example + +```graphql +type Subscription { + newMessage(roomId: ID!): Message! @requireAuth +} +``` + +1. I subscribed to a "newMessage” in room “2” +2. Someone added a message to room “2” with a from and body +3. A "NewMessage" event to Room 2 gets published +4. I find out and see who the message is from and what they messaged (the body) + +### Countdown Timer Example + +Counts down from a starting values by an interval. + +```graphql +subscription CountdownFromInterval { + countdown(from: 100, interval: 10) +} +``` + +This example showcases how a subscription yields its own response. + +## Live Queries + +Redwood has made it super easy to add live queries to your GraphQL server! You can push new data to your clients automatically once the data selected by a GraphQL operation becomes stale by annotating your query operation with the `@live` directive. + +The invalidation mechanism is based on GraphQL ID fields and schema coordinates. Once a query operation has been invalidated, the query is re-executed, and the result is pushed to the client. + +##### Listen for Data Changes + +- I'm not interested in what exactly changed it. +- Just give me the data. +- This is not part of the GraphQL specification. +- There can be multiple root fields. + +### Auction Live Query Example + +```graphql +query GetCurrentAuctionBids @live { + auction(id: "1") { + bids { + amount + } + highestBid { + amount + } + id + title + } +} + +mutation MakeBid { + bid(input: { auctionId: "1", amount: 10 }) { + amount + } +} +``` + +1. I listen for changes to Auction 1 by querying the auction. +2. A bid was placed on Auction 1. +3. The information for Auction 1 is no longer valid. +4. My query automatically refetches the latest Auction and Bid details. + +## Defer Directive + +The `@defer` directive allows you to postpone the delivery of one or more (slow) fields grouped in an inlined or spread fragment. + +### Slow and Fast Field Defer Example + +Here, the GraphQL schema defines two queries for a "fast" and a "slow" (i.e., delayed) information. + +```graphql +export const schema = gql` + type Query { + """ + A field that resolves fast. + """ + fastField: String! @skipAuth + + """ + A field that resolves slowly. + Maybe you want to @defer this field ;) + """ + slowField(waitFor: Int! = 5000): String @skipAuth + } +` +``` + +The Redwood services for these queries return the `fastField` immediately and the `showField` after some delay. + +```ts +import { logger } from 'src/lib/logger' + +const wait = (time: number) => + new Promise((resolve) => setTimeout(resolve, time)) + +export const fastField = async () => { + return 'I am speedy' +} + +export const slowField = async (_, { waitFor = 5000 }) => { + logger.debug('deferring slowField until ...') + await wait(waitFor) + logger.debug('now!') + + return 'I am slow' +} +``` + +When making the query: + +```graphql +query SlowAndFastFieldWithDefer { + ... on Query @defer { + slowField + } + fastField +} +``` + +The response returns: + +```json +{ + "data": { + "fastField": "I am speedy" + } +} +``` + +and will await the deferred field to then present: + +```json +{ + "data": { + "fastField": "I am speedy", + "slowField": "I am slow" + } +} +``` + +## Stream Directive + +The `@stream` directive allows you to stream the individual items of a field of the list type as the items are available. 
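+For example, a client opts in per operation by adding the directive to a list field. This is the same operation used in the curl example further below:
+
+```graphql
+query StreamAlphabet {
+  alphabet @stream
+}
+```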
+ +### Alphabet Stream Example + +Here, the GraphQL schema defines a query to return the letters of the alphabet: + +```graphql +export const schema = gql` + type Query { + alphabet: [String!]! @skipAuth +` +``` + +The service uses `Repeater` to write a safe stream resolver. + +:::info +[AsyncGenerators](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/AsyncGenerator) as declared via the `async *` keywords are prone to memory leaks and leaking timers. For real-world usage, use Repeater. +::: + +```ts +import { Repeater } from '@redwoodjs/realtime' + +import { logger } from 'src/lib/logger' + +export const alphabet = async () => { + return new Repeater(async (push, stop) => { + const values = ['a', 'b', 'c', 'd', 'e', 'f', 'g'] + const publish = () => { + const value = values.shift() + + if (value) { + logger.debug({ value }, 'publishing') + + push(value) + } + + if (values.length === 0) { + stop() + } + } + + const interval = setInterval(publish, 1000) + + stop.then(() => { + logger.debug('cancel') + clearInterval(interval) + }) + + publish() + }) +} +``` + +### What does the incremental stream look like? + +Since Apollo Client does not yet support the `@stream` directive, you can use them in the GraphiQL Playground or see them in action via CURL. + +When making the request with the `@stream` directive: + +```bash +curl -g -X POST \ + -H "accept:multipart/mixed" \ + -H "content-type: application/json" \ + -d '{"query":"query StreamAlphabet { alphabet @stream }"}' \ + http://localhost:8911/graphql +``` + +Here you see the initial response has `[]` for alphabet data. +Then on each push to the Repeater, an incremental update to the list of letters is sent. +The stream ends when `hasNext` is false: + +```bash +* Connected to localhost (127.0.0.1) port 8911 (#0) +> POST /graphql HTTP/1.1 +> Host: localhost:8911 +> User-Agent: curl/8.1.2 +> accept:multipart/mixed +> content-type: application/json +> Content-Length: 53 +> +< HTTP/1.1 200 OK +< connection: keep-alive +< content-type: multipart/mixed; boundary="-" +< transfer-encoding: chunked +< +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 39 + +{"data":{"alphabet":[]},"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["a"],"path":["alphabet",0]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["b"],"path":["alphabet",1]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["c"],"path":["alphabet",2]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["d"],"path":["alphabet",3]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["e"],"path":["alphabet",4]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["f"],"path":["alphabet",5]}],"hasNext":true} +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 70 + +{"incremental":[{"items":["g"],"path":["alphabet",6]}],"hasNext":true} +--- +... + +--- +Content-Type: application/json; charset=utf-8 +Content-Length: 17 + +{"hasNext":false} +----- +``` + +## How do I choose Subscriptions or Live Queries? 
+ +![image](https://github.com/ahaywood/redwoodjs-streaming-realtime-demos/assets/1051633/e3c51908-434c-4396-856a-8bee7329bcdd) + +When deciding on how to offer realtime data updates, you’ll want to consider: + +- How frequently do your users require information updates? + - Determine the value of "real-time" versus "near real-time" to your users. Do they need to know in less than 1-2 seconds, or is 10, 30, or 60 seconds acceptable for them to receive updates? + - Consider the criticality of the data update. Is it low, such as a change in shipment status, or higher, such as a change in stock price for an investment app? + - Consider the cost of maintaining connections and tracking updates across your user base. Is the infrastructure cost justifiable? + - If you don't require "real" real-time, consider polling for data updates on a reasonable interval. According to Apollo, [in most cases](https://www.apollographql.com/docs/react/data/subscriptions/), your client should not use subscriptions to stay up to date with your backend. Instead, you should poll intermittently with queries or re-execute queries on demand when a user performs a relevant action, such as clicking a button. +- How are you deploying? Serverless or serverful? + - Real-time options depend on your deployment method. + - If you are using a serverless architecture, your application cannot maintain a stateful connection to your users' applications. Therefore, it's not easy to "push," "publish," or "stream" data updates to the web client. + - In this case, you may need to look for third-party solutions that manage the infrastructure to maintain such stateful connections to your web client, such as [Supabase Realtime](https://supabase.com/realtime), [SendBird](https://sendbird.com/), [Pusher](https://pusher.com/), or consider creating your own [AWS SNS-based](https://docs.aws.amazon.com/sns/latest/dg/welcome.html) functionality. + + + +## Showcase Demos + +Please see our [showcase realtime app](https://realtime-demo.fly.dev) for examples of subscriptions and live queries. It also demonstrates how you can handle streaming responses, like those used by OpenAI chat completions. + +### Chat Room (Subscription) + +Sends a message to one of four Chat Rooms. + +Each room subscribes to its new messages via the `NewMessage` channel aka topic. + +```ts +context.pubSub.publish('newMessage', roomId, { from, body }) +``` + +#### Simulate + +```bash +./scripts/simulate_chat.sh -h +Usage: ./scripts/simulate_chat.sh -r [roomId] -n [num_messages] + ./scripts/simulate_chat.sh -h + +Options: + -r roomId Specify the room ID (1-4) for sending chat messages. + -n num_messages Specify the number of chat messages to send. If not provided, the script will run with a random number of messages. +``` +#### Test + +```ts +/** + * To test this NewMessage subscription, run the following in one GraphQL Playground to subscribe: + * + * subscription ListenForNewMessagesInRoom { + * newMessage(roomId: "1") { + * body + * from + * } + * } + * + * + * And run the following in another GraphQL Playground to publish and send a message to the room: + * + * mutation SendMessageToRoom { + * sendMessage(input: {roomId: "1", from: "hello", body: "bob"}) { + * body + * from + * } + * } + */ + ``` + +### Auction Bids (Live Query) + +Bid on a fancy pair of new sneaks! + +When a bid is made, the auction updates via a Live Query due to the invalidation of the auction key. 
+ +```ts + const key = `Auction:${auctionId}` + context.liveQueryStore.invalidate(key) + ``` + +#### Simulate + +```bash +./scripts/simulate_bids.sh -h +Usage: ./scripts/simulate_bids.sh [options] + +Options: + -a Specify the auction ID (1-5) for which to send bids (optional). + -n Specify the number of bids to send (optional). + -h, --help Display this help message. + ``` + +#### Test + +```ts +/** + * To test this live query, run the following in the GraphQL Playground: + * + * query GetCurrentAuctionBids @live { + * auction(id: "1") { + * bids { + * amount + * } + * highestBid { + * amount + * } + * id + * title + * } + * } + * + * And then make a bid with the following mutation: + * + * mutation MakeBid { + * bid(input: {auctionId: "1", amount: 10}) { + * amount + * } + * } + */ +``` + +### Countdown (Streaming Subscription) + +> It started slowly and I thought it was my heart +> But then I realised that this time it was for real + +Counts down from a starting values by an interval. + +This example showcases how a subscription can yields its own response. + +#### Test + +```ts +/** + * To test this Countdown subscription, run the following in the GraphQL Playground: + * + * subscription CountdownFromInterval { + * countdown(from: 100, interval: 10) + * } + */ +``` + +### Bedtime Story (Subscription with OpenAI Streaming) + +> Tell me a story about a happy, purple penguin that goes to a concert. + +Showcases how to use OpenAI to stream a chat completion via a prompt that writes a bedtime story: + +```ts +const PROMPT = `Write a short children's bedtime story about an Animal that is a given Color and that does a given Activity. + +Give the animal a cute descriptive and memorable name. + +The story should teach a lesson. + +The story should be told in a quality, style and feeling of the given Adjective. + +The story should be no longer than 3 paragraphs. + +Format the story using Markdown.` + +``` + +The story updates on each stream content delta via a `newStory` subscription topic event. + +```ts +context.pubSub.publish('newStory', id, story) +``` + +### Movie Mashup (Live Query with OpenAI Streaming) + +> It's Out of Africa meets Pretty Woman. + +> So it's a psychic, political, thriller comedy with a heart With a heart, not unlike Ghost meets Manchurian Candidate. + +-- The Player, 1992 + +Mashup some of your favorite movies to create something new and Netflix-worthy to watch. + +Powered by OpenAI, this movie tagline and treatment updates on each stream content delta via a Live Query by invalidating the `MovieMashup key. + +```ts +context.liveQueryStore.invalidate(`MovieMashup:${id}`) +``` diff --git a/docs/versioned_docs/version-7.0/redwoodrecord.md b/docs/versioned_docs/version-7.0/redwoodrecord.md new file mode 100644 index 000000000000..827428f47767 --- /dev/null +++ b/docs/versioned_docs/version-7.0/redwoodrecord.md @@ -0,0 +1,418 @@ +--- +description: An ORM with a natural interface +--- + +# RedwoodRecord + +> RedwoodRecord is currently considered to be **Experimental**. We are hoping folks will start using it and give us feedback to help shape its development and developer experience. + +RedwoodRecord is an ORM ([Object-relational Mapping](https://en.wikipedia.org/wiki/Object%E2%80%93relational_mapping)) built on top of Prisma. It may be extended in the future to wrap other database-access packages. + +RedwoodRecord is heavily inspired by [ActiveRecord](https://guides.rubyonrails.org/active_record_basics.html) which ships with [Ruby on Rails](https://rubyonrails.org). 
It presents a natural interface to the underlying data in your database, without worry about the particulars of SQL syntax. + +## Background and Terminology + +Before you can use RedwoodRecord you need to create classes for each database table you intend to access. Let's say we have a blog with three database tables: + +``` +┌───────────┐ ┌────────────┐ ┌────────────┐ +│ User │ │ Post │ │ Comment │ +├───────────┤ ├────────────┤ ├────────────┤ +│ id │•──┐ │ id │•──┐ │ id │ +│ name │ └──<│ userId │ └─<│ postId │ +│ email │ │ title │ │ name │ +└───────────┘ │ body │ │ message │ + └────────────┘ └────────────┘ +``` + +In database-speak we say that these tables have *one-to-many* relationships between them when moving from left to right in the diagram above: one User can have many Posts associated to it, and a Post can have many Comments. The "one" is denoted with a `•` on the arrow above and a `<` denotes the "many." + +You can leave it at that, as saying one-to-many explains both sides of the relationship, but it's sometimes convenient to refer to the relation in the "opposite" direction. Reading the diagram from right to left we could say that a comment *belongs to* a post (it has a foreign key `postId` that points to Post via `Comment.postId` → `Post.id`) and a Post belongs to a User (`Post.userId` → `User.id`) + +There are also *many-to-many* relationships, such as a Product and Category—a Product can have many different Categories, and a Category will have many different Products connected to it: + +``` +┌───────────┐ ┌────────────┐ +│ Product │ │ Category │ +├───────────┤ ├────────────┤ +│ id │>─────<│ id │ +│ name │ │ name │ +│ upc │ │ shelf │ +└───────────┘ └────────────┘ +``` + +These tables don't have any foreign keys (`productId` or `categoryId`) so how do they keep track of each other? Generally you'll create a *join table* between the two that references each other's foreign key: + +``` +┌───────────┐ ┌───────────────────┐ ┌────────────┐ +│ Product │ │ ProductCategory │ │ Category │ +├───────────┤ ├───────────────────┤ ├────────────┤ +│ id │•────<│ productId │ ┌──•│ id │ +│ name │ │ categoryId │>──┘ │ name │ +│ upc │ └───────────────────┘ │ shelf │ +└───────────┘ └────────────┘ +``` + +Now we're back to one-to-many relationships. In Prisma this join table is created and maintained for you. It will be named `_CategoryToPost` and the foreign keys will simply be named `A` and `B` and point to the two separate tables. Prisma refers to this as an [implicit many-to-many](https://www.prisma.io/docs/concepts/components/prisma-schema/relations/many-to-many-relations#implicit-many-to-many-relations) relationship. + +If you want to create the join table yourself and potentially store additional data there (like a timestamp of when the product was categorized) then this is simply a one-to-many relationship on both sides: a Product has many ProductCategories and a Category has many ProductCategories. Prisma refers to this as an [explicitly many-to-many](https://www.prisma.io/docs/concepts/components/prisma-schema/relations/many-to-many-relations#explicit-many-to-many-relations) relationship. + +> TODO: We'll be adding logic soon that will let you get to the categories from a product record (and vice versa) in explicit many-to-manys without having to manually go through ProductCategory. 
From this: +> ``` +> const product = await Product.find(1) +> const productCategories = await product.productCategories.all() +> const categories = productCategories.map(async (pc) => await pc.categories.all()).flat() +> ``` +> To this: +> ``` +> const product = await Product.find(1) +> const categories = await product.categories.all() +> ``` + +The only other terminology to keep in mind are the terms *model* and *record*. A *model* is the name for the class that represents one database table. The example above has three models: User, Post and Comment. Prisma also calls each database-table declaration in their `schema.prisma` declaration file a "model", but when we refer to a "model" in this doc it will mean the class that extends `RedwoodRecord`. A *record* is a single instance of our model that now represents a single row of data in the database. + +So: I use the User model to find a given user in the database, and, assuming they are found, I now have a single user record (an instance of the User model). + +## Usage + +You'll want to add RedwoodRecord's package to the api side: + +``` +yarn workspace api add @redwoodjs/record +``` + +First you'll need to create a model to represent the database table you want to access. In our blog example, let's create a User model: + +```jsx title="api/src/models/User.js" +import { RedwoodRecord } from '@redwoodjs/record' + +export default class User extends RedwoodRecord { } +``` + +Now we need to parse the Prisma schema, store it as a cached JSON file, and create an `index.js` file with a couple of config settings: + +``` +yarn rw record init +``` + +You'll see that this created `api/src/models/datamodel.js` and `api/src/models/index.js`. + +Believe it or not, that's enough to get started! Let's try using the Redwood console to make some quick queries without worrying about starting up any servers: + +> TODO: Models don't quite work correctly in the console. The require and fetching of records below will work, but actually trying to read any properties returns `undefined`. For now you'll need to test out RedwoodRecord directly in your app. + +``` +yarn rw c +``` + +Now we've got a standard Node REPL but with a bunch of Redwood goodness loaded up for us already. First, let's require our model: + +```jsx +const { User } = require('./api/src/models') +``` + +And now we can start querying and modifying our data: + +```jsx +await User.all() +const newUser = await User.create({ name: 'Rob', email: 'rob@redwoodjs.com' }) +newUser.name = 'Robert' +await newUser.save() +await User.find(1) +await User.findBy({ email: 'rob@redwoodjs.com' }) +await newUser.destroy() +``` + +### Initializing New Records + +To create a new record in memory only (not yet saved to the database) use `build()`: + +```jsx +const user = User.build({ firstName: 'David', lastName: 'Price' }) +``` + +Note that `build` simply builds the record in memory, and thus is not asynchronous, whereas other model methods that interact with Prisma/the DB are. + +See [create/save](#save) below for saving this record to the database. + +### Errors + +When a record cannot be saved to the database, either because of database errors or [validation](#validation) errors, the `errors` property will be populated with the error message(s). 
+ +```jsx +const user = User.build({ name: 'Rob Cameron' }) +await user.save() // => false +user.hasError() // => true +user.errors // => { base: [], email: ['must not be null'] } +user.errors.email // => ['must not be null'] +``` + +> `base` is a special key in the errors object and is for errors that don't apply to a single attribute, like `email`. For example, if you try to delete a record that doesn't exist (maybe someone else deleted it between when you retrieved it from the database and when you tried to delete it) you'll get an error on the `base` attribute: +> +> `user.errors.base // => ['User record to destroy not found']` + +You can preemptively check for errors before attempting to modify the record, but only for errors that would be caught with [validation](#validation), by using `isValid`: + +```jsx +const user = User.build({ name: 'Rob Cameron' }) +user.isValid // => false +user.errors.email // => ['must be formatted like an email address'] +``` + +### Validation + +Records can be checked for valid data before saving to the database by using the same [validation types](services.md#absence) available to [Service Validations](services.md#service-validations): + +```jsx +export default class User extends RedwoodRecord { + static validates = { + email: { presence: true, email: true }, + username: { length: { min: 2, max: 50 } } + } +} + +const user = User.build({ username: 'r' }) +await user.save() // => false +user.errors.email // => ['must be present'] +user.errors.username // => ['must be at least 2 characters'] +user.email = 'rob@redwoodjs.com' +user.username = 'rob' +await user.save() +``` + +### Finding Records + +There are a few different ways to find records for a model. Sometimes you want to find multiple records (all that match certain criteria) and sometimes only one (the one record with a certain email address). + +#### where() + +`where()` is for finding multiple records. It returns an array of model records. The first argument is the properties that you would normally set as the `where` value in Prisma's [`findMany()` function](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#findmany). The second argument (optional) is any additional properties (like ordering or limiting) that you want to perform on the resulting records: + +```jsx +await User.where() // would return all records +await User.where({ emailPreference: 'weekly' }) +await User.where({ theme: 'dark' }, { orderBy: { createdAt: 'desc' } }) +``` + +#### all() + +`all()` is simply a synonym for `where()` but makes it clearer that your intention is truly to select all records (and optionally sort/order them). The first (and only) argument is now the additional properties (like `sort` and `orderBy`): + +```jsx +await User.all() +await User.all({ orderBy: { lastName: 'asc' } }) +``` + +#### find() + +Finds a single record by that record's primary key. By default that is `id` but you can change the primary key of a model by defining it in the class definition: + +```jsx +export default class User extends RedwoodRecord { + static primaryKey = 'ident' +} +``` + +This call will throw an error if the record is not found: if you are trying to select a user by ID, presumably you expect that user to exist. So, it not existing is an exceptional condition. Behind the scenes this uses Prisma's [`findFirst()` function](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#findfirst). + +```jsx +await User.find(123) +``` + +#### findBy() + +Finds a single record by certain criteria.
Similar to `where()`, but will only return the first record that matches. The first argument is the properties that you would normally set as the `where` value in Prisma's [`findFirst()` function](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#findmany). The second argument (optional) is any additional properties (like ordering or limiting) that you want to perform on the resulting records before selecting one: + +```jsx +await User.findBy({ email: 'rob@redwoodjs.com' }) +await User.findBy({ email: { endsWith: 'redwoodjs.com' } }, { orderBy: { lastName: 'asc' }, take: 10 }) +``` + +If no record matching your query was found, it returns `null`. + +#### first() + +Alias for `findBy()`. This function can be used in your code to show your intention to only use the first of potentially multiple records that could match with `findBy()`. + +```jsx +const randomCoreMember = await User.first({ email: { endsWith: 'redwoodjs.com' } }) +``` + +### Creating Records + +You can create new records with your RedwoodRecord model in two ways: + +#### create() + +Initializes a new record and saves it. If the save fails, `create` will return `false` instead of the instance of your record. If you need your new model instance (even on a failed save), use `build()` followed by `save()`, described next. + +The first argument is the data that would be given to Prisma's `create()` function. The (optional) second argument is any additional properties that are passed on to Prisma: + +```jsx +await User.create({ name: 'Tom Preston-Werner' }) +await User.create({ firstName: 'Rob', email: 'rob@redwoodjs.com' }, { select: ['email'] }) +``` + +#### save() + +When calling `save()` on a record that hasn't been saved to the database, a new record will be created. If the record cannot be saved this call will return `false`. You can have it throw an error instead by including `{ throw: true }` in the first argument. + +If the record cannot be saved, you can inspect it for errors: + +```jsx +const user = User.build({ firstName: 'Peter', lastName: 'Pistorius' }) +await user.save() +// or +await user.save({ throw: true }) +// check for errors +user.hasErrors // => true +user.errors.email // => ['can't be null'] +``` + +### Updating Records + +There are two ways to update a record. You can either 1) list all of the attributes to change in a call to `update()`, or 2) set the attributes manually and then call `save()`. + +#### update() + +Call `update()` on a record, including the attributes to change as the first argument. The second (optional) argument is any properties to forward to Prisma when updating. Returns `false` if the record did not save, otherwise returns itself with the newly saved attributes. + +```jsx +const user = await User.find(123) +await user.update({ email: 'rob.cameron@redwoodjs.com' }) +// or +await user.update({ email: 'rob.cameron@redwoodjs.com' }, { throw: true }) +``` + +#### save() + +Save changes made to a record. The first (optional) argument includes any properties to be forwarded to Prisma, as well as the option to throw an error on a failed save: + +```jsx +const user = await User.find(123) +user.email = 'rob.cameron@redwoodjs.com' +await user.save() +// or +await user.save({ throw: true }) +``` + +### Deleting Records + +Records can be deleted easily enough. Coming soon will be class functions for deleting one or multiple records, without having to instantiate an instance of the model first. + +#### destroy() + +Call on a record to delete it in the database.
The first (optional) argument is any properties to forward to Prisma when deleting, as well as the option to throw an error if the delete fails. This function returns `false` if the record could not be deleted, otherwise returns the record itself. + +```jsx +const user = await User.find(123) +await user.destroy() +// or +await user.destroy({ throw: true }) +``` + +### Relationships + +As shown in [Background and Terminology](#background-and-terminology) above, RedwoodRecord provides a way to get data from related models. For example, to get the posts belonging to a user via what we call a *relation proxy*: + +```jsx +const user = await User.find(123) +const posts = await user.posts.all() +``` + +In this example `posts` is the proxy. All of the normal finder methods available on a model (`where()`, `all()`, `find()` and `findBy()`) can also be called on the relation proxy. But that's not all: you can create records as well and they will automatically be associated with the parent record: + +```jsx +const user = await User.find(123) +const post = await user.posts.create({ title: 'Related post!' }) +post.userId // => 123 +``` + +#### One-to-many + +The *many* records are accessible through the relation proxy: + +```jsx +const user = await User.find(123) +const post = await user.posts.first() +const comments = await post.comments.all() +``` + +You can also create a record: + + +```jsx +const user = await User.find(123) +const post = await user.posts.create({ title: 'Related post!' }) +``` + +#### Belongs-to + +A belongs-to relationship implies that you have the child record and want the parent. In a belongs-to relationship there is only ever a single parent, so there is no need for a relationship proxy property: there is only one record that will ever be returned. + +```jsx +const post = await Post.first() +const user = await post.user +``` + +> You cannot currently create a belongs-to record through the parent, but we're working on syntax to enable this! + +#### Many-to-many + +If you have an implicit many-to-many relationship then you will access the records similarly to the one-to-many type: + +```jsx +const product = await Product.find(123) +const categories = await product.categories.all() +``` + +If you have an explicit many-to-many relationship then you need to treat it as a two-step request. First, get the one-to-many relationships for the join table, then a belongs-to relationship for the data you actually want: + +``` +Product -> one-to-many -> ProductCategories -> belongs-to -> Category +------- ----------------- -------- +``` + +```jsx +const product = await Product.find(123) +const productCategories = await product.productCategories.all() +const categories = await Promise.all(productCategories.map(async (pc) => await pc.category)) +``` + +If you wanted to create a new record this way, you would need to create the join table record after having already created/retrieved the records on either side of the relation: + +```jsx +const product = await Product.find(123) +const category = await Category.find(234) +await ProductCategory.create({ productId: product.id, categoryId: category.id }) +``` + +> We're working on improving this syntax to make interacting with these records as simple as the implicit version. Stay tuned! + +## Coming Soon + +The following features are in development but are not available in this experimental release. + +### Lifecycle Callbacks + +Coming soon will be the ability to create functions around the lifecycle of a record.
For example, to set a newly-created user's default preferences, you may want an `afterCreate` callback that invokes a function (syntax not final): + +```jsx +export default class User extends RedwoodRecord { + static afterCreate = async (user) => { + await user.preferences.create({ email: 'weekly' }) + } +} +``` + +Or make sure that a user has transferred ownership of some data before closing their account: + +```jsx +export default class User extends RedwoodRecord { + static beforeDestroy = async (user) => { + if (await user.teams.count() !== 0) { + throw new Error('Please transfer ownership of your teams first') + } + } +} +``` diff --git a/docs/versioned_docs/version-7.0/router.md b/docs/versioned_docs/version-7.0/router.md new file mode 100644 index 000000000000..860327c1fcda --- /dev/null +++ b/docs/versioned_docs/version-7.0/router.md @@ -0,0 +1,870 @@ +--- +description: About the built-in router for Redwood apps +--- + +# Router + +This is the built-in router for Redwood apps. It takes inspiration from Ruby on Rails, React Router, and Reach Router, but is very opinionated in its own way. + +The router is designed to list all routes in a single file, with limited nesting. We prefer this design, as it makes it very easy to track which routes map to which pages. + +## Router and Route + +The first thing you need is a `Router`. It will contain all of your routes. The router will attempt to match the current URL to each route in turn, and only render those with a matching `path`. The only exception to this is the `notfound` route, which can be placed anywhere in the list and only matches when no other routes do. + +:::note The `notfound` route can't be nested in a `Set` + +If you want to wrap your custom notfound page in a `Layout`, then you should add the `Layout` to the page instead. See [customizing the NotFoundPage](#customizing-the-notfoundpage). + +::: + +Each route is specified with a `Route`. Our first route will tell the router what to render when no other route matches: + +```jsx title="Routes.js" +import { Router, Route } from '@redwoodjs/router' + +const Routes = () => ( + + + +) + +export default Routes +``` + +The router expects a single `Route` with a `notfound` prop. When no other route is found to match, the component in the `page` prop will be rendered. + +To create a route to a normal Page, you'll pass three props: `path`, `page`, and `name`: + +```jsx title="Routes.js" + +``` + +The `path` prop specifies the URL path to match, starting with the beginning slash. The `page` prop specifies the Page component to render when the path is matched. The `name` prop is used to specify the name of the _named route function_. + +## Private Routes + +Some pages should only be visible to authenticated users. We support this using the `PrivateSet` component. Read more [further down](#privateset). + +## Sets of Routes + +You can group Routes into sets using the `Set` component. `Set` allows you to wrap a set of Routes in another component or array of components—usually a Context, a Layout, or both: + +```jsx title="Routes.js" +import { Router, Route, Set } from '@redwoodjs/router' +import BlogContext from 'src/contexts/BlogContext' +import BlogLayout from 'src/layouts/BlogLayout' + +const Routes = () => { + return ( + + + + + + + + + ) +} + +export default Routes +``` + +The `wrap` prop accepts a single component or an array of components. Components are rendered in the same order they're passed, so in the example above, Set expands to: + +```jsx + + + + // ... 
+ + +``` + +Conceptually, this fits with how we think about Context and Layouts as things that wrap Pages and contain content that’s outside the scope of the Pages themselves. Crucially, since they're higher in the tree, `BlogContext` and `BlogLayout` won't rerender across Pages in the same Set. + +There's a lot of flexibility here. You can even nest `Sets` to great effect: + +```jsx title="Routes.js" +import { Router, Route, Set } from '@redwoodjs/router' +import BlogContext from 'src/contexts/BlogContext' +import BlogLayout from 'src/layouts/BlogLayout' +import BlogNavLayout from 'src/layouts/BlogNavLayout' + +const Routes = () => { + return ( + + + + + + + + + + + ) +} +``` + +### Forwarding props + +All props you give to `` (except for `wrap`) will be passed to the wrapper components. + +So this... + +```jsx + + + +``` + +becomes... + +```jsx + + + +``` + +### `PrivateSet` + +A `PrivateSet` makes all Routes inside that Set require authentication. When a user isn't authenticated and attempts to visit one of the Routes in the `PrivateSet`, they'll be redirected to the Route passed as the `PrivateSet`'s `unauthenticated` prop. The originally-requested Route's path is added to the query string as a `redirectTo` param. This lets you send the user to the page they originally requested once they're logged-in. + +Here's an example of how you'd use a `PrivateSet`: + +```jsx title="Routes.js" + + + + + + +``` + +For more fine-grained control, you can specify `roles` (which takes a string for a single role or an array of roles), and the router will check to see that the current user is authorized before giving them access to the Route. If they're not, they will be redirected to the page specified in the `unauthenticated` prop, such as a "forbidden" page. Read more about Role-based Access Control in Redwood [here](how-to/role-based-access-control.md). + +To protect private routes for access by a single role: + +```jsx title="Routes.js" + + + + + + + +``` + +To protect private routes for access by multiple roles: + +```jsx title="Routes.js" + + + + + + + +``` + +Redwood uses the `useAuth` hook under the hood to determine if the user is authenticated. Read more about authentication in Redwood [here](tutorial/chapter4/authentication.md). + +## Link and named route functions + +When it comes to routing, matching URLs to Pages is only half the equation. The other half is generating links to your pages. The router makes this really simple without having to hardcode URL paths. In a Page component, you can do this (only relevant bits are shown in code samples from now on): + +```jsx title="SomePage.js" +import { Link, routes } from '@redwoodjs/router' + +// Given the route in the last section, this produces: +const SomePage = () => +``` + +You use a `Link` to generate a link to one of your routes and can access URL generators for any of your routes from the `routes` object. We call the functions on the `routes` object _named route functions_ and they are named after whatever you specify in the `name` prop of the `Route`. + +Named route functions simply return a string, so you can still pass in hardcoded strings to the `to` prop of the `Link` component, but using the proper named route function is easier and safer. Plus, if you ever decide to change the `path` of a route, you don't need to change any of the `Link`s to it (as long as you keep the `name` the same)! 
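+ +As a quick sketch of how these pieces fit together (the `home` route and `HomePage` here are assumptions for illustration), a named route function is passed to a `Link`'s `to` prop: + +```jsx title="SomePage.js" +import { Link, routes } from '@redwoodjs/router' + +// Assuming a route like <Route path="/" page={HomePage} name="home" /> exists, +// routes.home() returns the string "/" that the Link renders as its href. +const SomePage = () => ( +  <nav> +    <Link to={routes.home()}>Home</Link> +  </nav> +) + +export default SomePage +```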
+ +## Active links + +`NavLink` is a special version of `Link` that will add an `activeClassName` to the rendered element when it matches **exactly** the current URL. + +```jsx title="MainMenu.js" +import { NavLink, routes } from '@redwoodjs/router' + +// Will render respectively when on the page +const MainMenu = () => +
      +
    • + + + Home + +
    • +
    • + + + Home > Tutorial + +
    • +
    +``` + +Alternatively, you can add the `activeMatchParams` prop to your `NavLink` to match the current URL **partially** + +```jsx +import { NavLink, routes } from '@redwoodjs/router' + +// Will render
    when on any of Home tutorial pages +const MainMenu = () => ( +
  • + + Home > Tutorial + +
  • +) +``` + +> Note `activeMatchParams` is an array of `string` _(key only)_ or `Record` _(key and value)_ + +More granular match, `page` key only and `tab=tutorial` + +```jsx +// Match /?tab=tutorial&page=* +activeMatchParams={[{ tab: 'tutorial' }, 'page' ]} +``` + +### useMatch + +You can use `useMatch` to create your own component with active styles. + +> `NavLink` uses it internally! + +```jsx +import { Link, routes, useMatch } from '@redwoodjs/router' + +const CustomLink = ({ to, ...rest }) => { + const matchInfo = useMatch(to) + + return +} + +const MainMenu = () => { + return +} +``` + +`useMatch` accepts `searchParams` in the `options` for matching granularity which is exactly the same as `activeMatchParams` of `NavLink` + +```jsx +import { Link, routes, useMatch } from '@redwoodjs/router' + +const CustomLink = ({ to, ...rest }) => { + const matchInfo = useMatch(to, { searchParams: [{ tab: 'tutorial' }, 'page'] }) + + return +} +``` + +Passing in `routeParams` you can make it match only on specific route parameter +values. + +```jsx +const match = useMatch('/product/{category}/{id}', { + routeParams: { category: 'shirts' } +}) +``` + +The above example will match /product/shirts/213, but not /product/pants/213 +(whereas not specifying `routeParams` at all would match both). + +To get the path you need to pass to `useMatch` you can use +[`useRoutePaths`](#useroutepaths) or [`useRoutePath`](#useroutepath) + +Here's an example: + +```jsx + + +const animalRoutePath = useRoutePath('animal') +// => '/{animal}/{name}' + +const matchOnlyDog = useMatch(animalRoutePath, { routeParams: { animal: 'dog' }}) +const matchFullyDynamic = useMatch(animalRoutePath) +``` + +In the above example, if the current page url was +`https://example.org/dog/fido` then both `matchOnlyDog` and `matchFullyDynamic` +would have `match: true`. + +If the current page instead was `https://example.org/cat/garfield` then only +`matchFullyDynamic` would match + +See below for more info on route parameters. + +## Route parameters + +To match variable data in a path, you can use route parameters, which are specified by a parameter name surrounded by curly braces: + +```jsx title="Routes.js" + +``` + +This route will match URLs like `/user/7` or `/user/mojombo`. You can have as many route parameters as you like: + +```jsx title="Routes.js" + +``` + +By default, route parameters will match up to the next slash or end-of-string. Once extracted, the route parameters are sent as props to the Page component. In the 2nd example above, you can receive them like so: + +```jsx title="PostPage.js" +const PostPage = ({ year, month, day, slug }) => { ... } +``` + +## Named route functions with parameters + +If a route has route parameters, then its named route function will take an object of those same parameters as an argument: + +```jsx title="SomePage.js" +... +``` + +All parameters will be converted to strings before being inserted into the generated URL. If you don't like the default JavaScript behavior of how this conversion happens, make sure to convert to a string before passing it into the named route function. + +If you specify parameters to the named route function that do not correspond to parameters defined on the route, they will be appended to the end of the generated URL as search params in `key=val` format: + +```jsx title="SomePage.js" +... +// => "/users?sort=desc&filter=all" +``` + +## Route parameter types + +Route parameters are extracted as strings by default, but they will often represent typed data. 
The router offers a convenient way to auto-convert certain types right in the `path` specification: + +```jsx title="Routes.js" + +``` + +By adding `:Int` onto the route parameter, you are telling the router to only match `/\d+/` and then use `Number()` to convert the parameter into a number. Now, instead of a string being sent to the Page, a number will be sent! This means you could have both a route that matches numeric user IDs **and** a route that matches string IDs: + +```jsx title="Routes.js" + + +``` + +Now, if a request for `/user/mojombo` comes in, it will fail to match the first route, but will succeed in matching the second. + +## Core route parameter types + +We call built-in parameter types _core parameter types_. All core parameter types begin with a capital letter. Here are the types: + +- `Int` - Matches and converts an integer. +- `Float` - Matches and converts a Float. +- `Boolean` - Matches and converts Boolean (true or false only) + +> Note on TypeScript support +> Redwood will automatically generate types for your named routes, but you do have to run `yarn redwood dev` or `yarn redwood build` at least once for your `Routes.{js,ts}` to be parsed + +### Glob Type + +There is one more core type that is a bit different: the glob type. Instead of matching to the next `/` or the end of the string, it will greedily match as much as possible (including `/` characters) and capture the match as a string. + +```jsx title="Routes.js" + +``` + +In this example, we want to take everything after `/file/` and have it sent to the Page as `filePath`. So for the path `/file/api/src/lib/auth.js`, `filePath` would contain `api/src/lib/auth.js`. + +You can use multiple globs in your paths: + +```jsx title="Routes.js" + +``` + +This will match a path like `/from/2021/11/03/to/2021/11/17`. Note that for this to work, there must be some static string between the globs so the router can determine where the boundaries of the matches should be. + +## User route parameter types + +The router goes even further, allowing you to define your own route parameter types. Your custom types must begin with a lowercase letter. You can specify them like so: + +```jsx title="Routes.js" +const userRouteParamTypes = { + slug: { + match: /\w+-\w+/, + parse: (param) => param.split('-'), + }, +} + + + + +``` + +Here we've created a custom `slug` route parameter type. It is defined by `match` and `parse`. Both are optional; the default `match` regexp is `/[^/]+/` and the default `parse` function is `(param) => param`. + +In the route we've specified a route parameter of `{name:slug}` which will invoke our custom route parameter type and if we have a request for `/post/redwood-router`, the resulting `name` prop delivered to `PostPage` will be `['redwood', 'router']`. + +## Trailing slashes + +The router by default removes all trailing slashes before attempting to match the route you are trying to navigate to. + +For example, if you attempt to navigate to `/about` and you enter `/about/`, the router will remove the trailing `/` and will match `path="/about"` + +There are 3 values that can be used with the `trailingSlashes` prop + +1. **never** (default): strips trailing slashes before matching ("/about/" -> "/about") +2. **always**: always adds trailing slashes before matching ("/about" -> "/about/") +3. **preserve** -> paths without a slash won't match paths with a slash ("/about" -> "/about", "/about/" -> "/about/") + +If you need to match trailing slashes exactly, use the `preserve` value. 
+In the following example, `/about/` will _not_ match `/about` and you will be sent to the `NotFoundPage` + +```jsx + + + + + +``` + +## useParams + +Sometimes it's convenient to receive route parameters as the props to the Page, but in the case where a deeply nested component needs access to the route parameters, it quickly becomes tedious to pass those props through every intervening component. The router solves this with the `useParams` hook: + +```jsx title="SomeDeeplyNestedComponent.js" +import { useParams } from '@redwoodjs/router' + +const SomeDeeplyNestedComponent = () => { + const { id } = useParams() + ... +} +``` + +In the above example, we've pulled in the `id` route parameter without needing to have it passed in to us from anywhere. + +## useLocation + +If you'd like to get access to the current URL, `useLocation` returns a read-only location object representing it. The location object has three properties, [pathname](https://developer.mozilla.org/en-US/docs/Web/API/Location/pathname), [search](https://developer.mozilla.org/en-US/docs/Web/API/Location/search), and [hash](https://developer.mozilla.org/en-US/docs/Web/API/Location/hash), that update when the URL changes. This makes it easy to fire off navigation side effects or use the URL as if it were state: + +```jsx +import { useLocation } from '@redwoodjs/router' + +const App = () => { + const { pathname, search, hash } = useLocation() + + // log the URL when the pathname changes + React.useEffect(() => { + myLogger(pathname) + }, [pathname]) + + // initiate a query state with the search val + const [query, setQuery] = React.useState(search) + + // conditionally render based on hash + if (hash === '#ping') { + return + } + + return <>... +} +``` + +## useRoutePaths + +`useRoutePaths()` is a React hook you can use to get a map of all routes mapped to their literal paths, as they're defined in your routes file. + +Example usage: + +```jsx +const routePaths = useRoutePaths() + +return
    {JSON.stringify(routePaths, undefined, 2)}
    +``` + +Example output: + +``` +{ + "home": "/" + "about": "/about", + "login": "/login", + "signup": "/signup", + "forgotPassword": "/forgot-password", + "resetPassword": "/reset-password", + "newContact": "/contacts/new", + "editContact": "/contacts/{id:Int}/edit", + "contact": "/contacts/{id:Int}", + "contacts": "/contacts", +} +``` + +## useRoutePath + +Use this hook when you only want the path for a single route. By default it +will give you the path for the current route +```jsx +// returns "/about" if you're currently on https://example.org/about +const aboutPath = useRoutePath() +``` + +You can also pass in the name of a route and get the path for that route +```jsx +// returns "/about" +const aboutPath = useRoutePath('about') +``` + +Note that the above is the same as +```jsx +const routePaths = useRoutePaths() +// returns "/about" +const aboutPath = routePaths.about +``` + +## useRouteName + +Use the `useRouteName()` hook to get the name of the current route (the page +the user is currently visiting). The name can then also be used with `routes` +if you need to dynamically get the url to the current page: + +```jsx +const routeName = useRouteName() +const routeUrl = routeName ? routes[routeName]() : undefined +``` + +## Navigation + +### navigate + +If you'd like to programmatically navigate to a different page, you can simply use the `navigate` function: + +```jsx title="SomePage.js" +import { navigate, routes } from '@redwoodjs/router' + +const SomePage = () => { + const onSomeAction = () => { + navigate(routes.home()) + } + ... +} +``` + +The browser keeps track of the browsing history in a stack. By default when you navigate to a new page a new item is pushed to the history stack. But sometimes you want to replace the top item on the stack instead of appending to the stack. This is how you do that in Redwood: `navigate(routes.home(), { replace: true })`. As you can see you need to pass an options object as the second parameter to `navigate` with the option `replace` set to `true`. + +### back + +Going back is as easy as using the `back()` function that's exported from the router. + +```jsx title="SomePage.js" +import { back } from '@redwoodjs/router' + +const SomePage = () => { + const onSomeAction = () => { + back() + } + ... +} +``` + +## Redirect + +If you want to declaratively redirect to a different page, use the `` component. + +In the example below, SomePage will redirect to the home page. + +```jsx title="SomePage.js" +import { Redirect, routes } from '@redwoodjs/router' + +const SomePage = () => +``` + +In addition to the `to` prop, `` also takes an `options` prop. This is the same as [`navigate()`](#navigate)'s second argument: `navigate(_, { replace: true })`. We can use it to *replace* the top item of the browser history stack (instead of pushing a new one). This is how you use it to have this effect: ``. + +## Code-splitting + +By default, the router will code-split on every Page, creating a separate lazy-loaded bundle for each. When navigating from page to page, the router will wait until the new Page module is loaded before re-rendering, thus preventing the "white-flash" effect. + +## Not code splitting + +If you'd like to override the default lazy-loading behavior and include certain Pages in the main bundle, you can simply add the import statement to the `Routes.js` file: + +```jsx title="Routes.js" +import HomePage from 'src/pages/HomePage' +``` + +Redwood will detect your explicit import and refrain from splitting that page into a separate bundle. 
Be careful with this feature, as you can easily bloat the size of your main bundle to the point where your initial page load time becomes unacceptable. + +## Page loaders & PageLoadingContext + +### Loader while page chunks load + +Because lazily-loaded pages can take a non-negligible amount of time to load (depending on bundle size and network connection), you may want to show a loading indicator to signal to the user that something is happening after they click a link. + +In order to show a loader as your page chunks are loading, you simply add the `whileLoadingPage` prop to your route, `Set` or `PrivateSet` component. + +```jsx title="Routes.js" +import SkeletonLoader from 'src/components/SkeletonLoader' + + + + + + +``` + +After adding this to your app you will probably not see it when navigating between pages. This is because having a loading indicator is nice, but can get annoying when it shows up every single time you navigate to a new page. In fact, this behavior makes it feel like your pages take even longer to load than they actually do! The router takes this into account and, by default, will only show the loader when it takes more than 1000 milliseconds for the page to load. You can change this to whatever you like with the `pageLoadingDelay` prop on `Router`: + +```jsx title="Routes.js" +... +``` + +Now the loader will show up after 500ms of load time. To see your loading indicator, you can set this value to 0 or, even better, [change the network speed](https://developers.google.com/web/tools/chrome-devtools/network#throttle) in developer tools to "Slow 3G" or another agonizingly slow connection speed. + +#### Using PageLoadingContext + +An alternative way to implement whileLoadingPage is to use `usePageLoadingContext`: + +> **VIDEO:** If you'd prefer to watch a video, there's one accompanying this section: https://www.youtube.com/watch?v=BVkyXjUQADs&feature=youtu.be + +```jsx title="SomeLayout.js" +import { usePageLoadingContext } from '@redwoodjs/router' + +const SomeLayout = (props) => { + const { loading } = usePageLoadingContext() + return ( +
    + {loading &&
    Loading...
    } +
    {props.children}
    +
    + ) +} +``` + +When the lazy-loaded page is loading, `PageLoadingContext.Consumer` will pass `{ loading: true }` to the render function, or false otherwise. You can use this context wherever you like in your application! + +### Loader while auth details are being retrieved + +Let's say you have a dashboard area on your Redwood app, which can only be accessed after logging in. When Redwood Router renders your private page, it will first fetch the user's details, and only render the page if it determines the user is indeed logged in. + +In order to display a loader while auth details are being retrieved you can add the `whileLoadingAuth` prop to your `PrivateSet` component: + +```jsx +//Routes.js + + + + + + {/* other routes */} + + +``` + +## `FatalErrorPage` + +Every Redwood project ships with a default `FatalErrorPage` located in `web/src/pages/FatalErrorPage`. +This page gets rendered when an error makes its way all the way to the top of your app without being handled by a catch block or a React error boundary. + +Note that this page behaves differently in development than in production. + +### In Development + +In development, the `FatalErrorPage` provides helpful debugging information about the error and any GraphQL request that's involved. + +For example, if there's a missing component that's causing an error, this's what you'll see: + +![fatal_error_message](/img/router/fatal_error_message.png) + +Or if the variable passed as a prop to a component can't be found: + +![fatal_error_message_query](/img/router/fatal_error_message_query.png) + +And if the page has a Cell, you'll see the Cell's request and response which may have contributed to the error: + +![fatal_error_message_request](/img/router/fatal_error_request.png) + +### In Production + +By default, the `FatalErrorPage` in production is barebones: + +![fatal_something_went_wrong](/img/router/fatal_something_went_wrong.png) + +### Customizing the `FatalErrorPage` + +You can customize the production `FatalErrorPage`, but it's important to keep things simple to avoid the possibility that it'll cause its own error. +If it does, the router still renders a generic error page, but your users will appreciate something a bit more thoughtful: + +![fatal_something_went_wrong_custom](/img/router/fatal_something_went_wrong_custom.png) + +```jsx title="web/src/pages/FatalErrorPage/FatalErrorPage.js" +import { Link, routes } from '@redwoodjs/router' + +// ... + +export default RedwoodDevFatalErrorPage || + (() => ( +
    +
    +
    +

    + 🤦‍♂️ Oops. +

    +
    +
    +

    + Something went wrong +

    +

    + Sorry about that. Please contact support for help. +

    +
    +
    + + Home + + + Contact Support + +
    +
    +
    +
    +
    + )) + ``` + +Note that if you're copy-pasting this example, it uses [Tailwind CSS](https://tailwindcss.com), so you'll have to set that up first. See the [setup ui](./cli-commands.md#setup-ui) CLI command to add it to your project. + +:::note Can I customize the development one? + +As it's part of the RedwoodJS framework, you can't _change_ the dev fatal error page, but you can always build your own that takes the same props. If there's a feature you want to add to the built-in version, let us know on the [forums](https://community.redwoodjs.com/). + +::: + +## `NotFoundPage` + +Every Redwood project ships with a default `NotFoundPage` located in `web/src/pages/NotFoundPage`. + +But just because it's called `NotFoundPage` doesn't mean the router knows that. The only way the router knows which page is the `NotFoundPage` is via the `notfound` prop, which tells the router what to render when no routes match: + +```jsx title="web/src/Routes.js" +import { Router, Route } from '@redwoodjs/router' + +const Routes = () => ( + + // highlight-next-line + + +) + +export default Routes +``` + +### Customizing the `NotFoundPage` + +By default, the `NotFoundPage` is a basic HTML page with internal styles: + +```jsx title="web/src/pages/NotFoundPage/NotFoundPage.js" +export default () => ( +
    + // ... some custom css +
    +

    + 404 Page Not Found +

    +
    +
    +) +``` + +You're free to customize it however you like. You can change the markup and even use CSS or UI libraries to style it. +Here's an example using [Tailwind CSS](https://tailwindcss.com). +(See the [setup ui](./cli-commands.md#setup-ui) CLI command to add it to your project.) + +![custom_not_found](/img/router/custom_not_found_page.png) + +```jsx title="web/src/pages/NotFoundPage/NotFoundPage.js" +import { Link, routes } from '@redwoodjs/router' + +export default () => ( +
    +
    +
    +

    404

    +
    +
    +

    + Page not found +

    +

    + Check the URL in the address bar and please try again. +

    +
    +
    + + Home + + + Get Help + +
    +
    +
    +
    +
    +) +``` + +While the `notfound` route can't be nested in a `Set` like other routes, you can still wrap it in Layouts by importing them into the page: + +```jsx title="web/src/pages/NotFoundPage/NotFoundPage.js" +// highlight-next-line +import MainLayout from 'src/layouts/MainLayout/MainLayout' + +export default () => ( + // highlight-next-line + +
    +
    +

    + 404 Page Not Found +

    +
    +
    + // highlight-next-line +
    +) +``` + +This means that the `NotFoundPage` can use Redwood features like Cells or auth to construct navigation options or detailed header and footer content to help your users find their way back to the main application. diff --git a/docs/versioned_docs/version-7.0/schema-relations.md b/docs/versioned_docs/version-7.0/schema-relations.md new file mode 100644 index 000000000000..0d0b3add404b --- /dev/null +++ b/docs/versioned_docs/version-7.0/schema-relations.md @@ -0,0 +1,240 @@ +--- +description: How Prisma relations work with scaffolds +--- + +# Prisma Relations and Redwood's Generators + +## Many-to-many Relationships + +A many-to-many relationship is accomplished by creating a "join" or "lookup" table between two other tables. +For example, if a **Product** can have many **Tag**s, any given **Tag** can also have many **Product**s that it is attached to. +A database diagram for this relationship could look like: + +``` +┌───────────┐ ┌─────────────────┐ ┌───────────┐ +│ Product │ │ ProductsOnTag │ │ Tag │ +├───────────┤ ├─────────────────┤ ├───────────┤ +│ id │────<│ productId │ ┌──│ id │ +│ title │ │ tagId │>──┘ │ name │ +│ desc │ └─────────────────┘ └───────────┘ +└───────────┘ +``` + +[Here](https://www.prisma.io/docs/concepts/components/prisma-schema/relations#many-to-many-relations) +are Prisma's docs for creating many-to-many relationships. +The `schema.prisma` syntax to create this relationship looks like: + +```jsx +model Product { + id Int @id @default(autoincrement()) + title String + desc String + tags Tag[] +} + +model Tag { + id Int @id @default(autoincrement()) + name String + products Product[] +} +``` + +These relationships can be [implicit](https://www.prisma.io/docs/concepts/components/prisma-schema/relations/many-to-many-relations#implicit-many-to-many-relations) (as this diagram shows) or [explicit](https://www.prisma.io/docs/concepts/components/prisma-schema/relations/many-to-many-relations#explicit-many-to-many-relations) (explained below). Redwood's SDL generator (which is also used by the scaffold generator) only supports an **explicit** many-to-many relationship when generating with the `--crud` flag. What's up with that? + +## CRUD Requires an `@id` + +CRUD (Create, Retrieve, Update, Delete) actions in Redwood currently require a single, unique field in order to retrieve, update or delete a record. This field must be denoted with Prisma's [`@id`](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#id) attribute, marking it as the tables's primary key. This field is guaranteed to be unique and so can be used to find a specific record. + +Prisma's implicit many-to-many relationships create a table _without_ a single field marked with the `@id` attribute. Instead, it uses a similar attribute: [`@@id`](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#id-1) to define a *multi-field ID*. This multi-field ID will become the tables's primary key. The diagram above shows the result of letting Prisma create an implicit relationship. + +Since there's no single `@id` field in implicit many-to-many relationships, you can't use the SDL generator with the `--crud` flag. Likewise, you can't use the scaffold generator, which uses the SDL generator (with `--crud`) behind the scenes. 
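+ +To see why this matters, here's roughly what a scaffolded update service looks like (a sketch based on the `Product` model above, not generator output copied verbatim): it needs a single unique field to put in Prisma's `where` clause, and that's exactly what the `@id` field provides. + +```js +import { db } from 'src/lib/db' + +// A scaffolded service keys every update (and find/delete) off a single +// unique `id` field. With only a multi-field @@id there is no such field. +export const updateProduct = ({ id, input }) => { +  return db.product.update({ data: input, where: { id } }) +} +```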
+ +## Supported Table Structure + +To support both CRUD actions and to remain consistent with Prisma's many-to-many relationships, a combination of the `@id` and [`@@unique`](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#unique-1) attributes can be used. With this, `@id` is used to create a primary key on the lookup-table; and `@@unique` is used to maintain the table's unique index, which was previously accomplished by the primary key created with `@@id`. + +> Removing `@@unique` would let a specific **Product** reference a particular **Tag** more than a single time. + +You can get this working by creating an explicit relationship—defining the table structure yourself: + +```jsx +model Product { + id Int @id @default(autoincrement()) + title String + desc String + tags ProductsOnTag[] +} + +model Tag { + id Int @id @default(autoincrement()) + name String + products ProductsOnTag[] +} + +model ProductsOnTag { + id Int @id @default(autoincrement()) + tagId Int + tag Tag @relation(fields: [tagId], references: [id]) + productId Int + product Product @relation(fields: [productId], references: [id]) + + @@unique([tagId, productId]) +} +``` + +Which creates a table structure like: + +``` +┌───────────┐ ┌──────────────────┐ ┌───────────┐ +│ Product │ │ ProductsOnTags │ │ Tag │ +├───────────┤ ├──────────────────┤ ├───────────┤ +│ id │──┐ │ id │ ┌──│ id │ +│ title │ └──<│ productId │ │ │ name │ +│ desc │ │ tagId │>─┘ └───────────┘ +└───────────┘ └──────────────────┘ + +``` + +Almost identical! But now there's an `id` and the SDL/scaffold generators will work as expected. The explicit syntax gives you a couple additional benefits—you can customize the table name and even add more fields. Maybe you want to track which user tagged a product—add a `userId` column to `ProductsOnTags` and now you know. + +## Troubleshooting Generators + +Are you getting errors when generating SDLs or scaffolds for your Prisma models? +There's a known limitation in Redwood's GraphQL type generation that happens when generating SDL for, or scaffolding out, a Prisma model that has relations before the SDL for the related model exists. + +This may sound a little abstract, so let's look at an example. Let's say that you're modeling bookshelves. Your prisma schema has two data models, `Book` and `Shelf`. This is a one to many relationship: a shelf has many books, but a book can only be on one shelf: + +```js +model Book { + id Int @id @default(autoincrement()) + title String @unique + // highlight-start + shelf Shelf? @relation(fields: [shelfId], references: [id]) + shelfId Int? + // highlight-end +} + +model Shelf { + id Int @id @default(autoincrement()) + name String @unique + // highlight-next-line + books Book[] +} +``` + +The data model looks great. Let's make it real with SDLs and services: + +``` +yarn rw g sdl Book +``` + +Here's how the output from the command starts: + +```bash + ✔ Generating SDL files... + ✔ Successfully wrote file `./api/src/graphql/books.sdl.js` + ✔ Successfully wrote file `./api/src/services/books/books.scenarios.js` + ✔ Successfully wrote file `./api/src/services/books/books.test.js` + ✔ Successfully wrote file `./api/src/services/books/books.js` +``` + +Looks like it's working so far. The SDL and service files generated! +But, when the command starts generating types... 💥 + +``` + ⠙ Generating types ... +Failed to load schema + +# ... + +type Query { + redwood: Redwood +},graphql/**/*.sdl.{js,ts},directives/**/*.{js,ts}: + + Unknown type: "Shelf". 
+ Error: Unknown type: "Shelf". +``` + +What happened? +Remember, the first thing to do when you get an error: _read the error message_. +The key is `Unknown type: "Shelf"`. +The type of `Book`'s `shelf` field is `Shelf`. +But we didn't generate the SDL for `Shelf` yet, so it doesn't exist. +And naturally, types can't be generated for it. + +But fear not. +This should be an easy fix. +There are two ways you can go about it. + +You can generate the SDLs for all the models in the relation, ignoring the errors. This way the last model in the relation should generate cleanly. + +Or, you can remove or comment out the relations: + +```js +model Book { + id Int @id @default(autoincrement()) + title String @unique + // highlight-start + // Shelf Shelf? @relation(fields: [shelfId], references: [id]) + // shelfId Int? + // highlight-end +} + +model Shelf { + id Int @id @default(autoincrement()) + name String @unique + // highlight-next-line + // books Book[] +} +``` + +Then, generate the SDL for, or scaffold out, each model separately: + +``` +yarn rw g sdl Book +# ... + +yarn rw g sdl Shelf +# ... +``` + +And lastly, add or comment in the relationships and regenerate their SDLs or scaffolds using the `--force` flag to overwrite the existing files, adding the `--no-tests` flag to preserve your tests and scenario files (if needed): + +``` +yarn rw g sdl Book --force --no-tests +# ... + +yarn rw g sdl Shelf --force --no-tests +# ... +``` + +### Self-Relations + +[Self-relations](https://www.prisma.io/docs/concepts/components/prisma-schema/relations/self-relations#one-to-many-self-relations) are useful for modeling parent-child relationships where the parent and child are the "same type of thing". +For example, in a business, everyone is an employee with a role and possibly someone to directly report to: + +* President—no direct report (for the purposes of this example) +* Director—reports to the President +* Manager—reports to a Director +* Employee—reports to a Manager, but has no direct reports + +Let's use a self-relation to model this in our Prisma schema: + +```js +model Employee { + id Int @id @default(autoincrement()) + name String + jobTitle String + // highlight-start + reportsToId Int? @unique + reportsTo Employee? @relation("OrgChart", fields: [reportsToId], references: [id]) + directReports Employee? @relation("OrgChart") + // highlight-end +} +``` + +For the generators, what's important here is that the related models are optional. +`reportsToId`, `reportsTo`, and `directReports` use Prisma's `?` syntax to indicate that they're optional—not required. +The Redwood generators may complain or fail if you try to force a requirement here. + +It's important because if you're at the top—say you're the President—then you don't have a `reportsTo`, and if you're just an Employee, then you don't have anyone that directly reports to you. diff --git a/docs/versioned_docs/version-7.0/security.md b/docs/versioned_docs/version-7.0/security.md new file mode 100644 index 000000000000..1493da1f2d10 --- /dev/null +++ b/docs/versioned_docs/version-7.0/security.md @@ -0,0 +1,77 @@ +--- +description: Build and deploy secure applications +--- + +# Security + +RedwoodJS wants you to be able build and deploy secure applications and takes the topic of security seriously. 
+ +* [RedwoodJS Security](https://github.com/redwoodjs/redwood/security) on GitHub +* [CodeQL code scanning](https://github.com/features/security) +* [Authentication](authentication.md) +* [Webhook signature verification](webhooks.md) +* [Ways to keep your serverless functions secure](serverless-functions.md#security-considerations) +* [Environment variables for secure keys and tokens](environment-variables.md) + +> ⚠️ **Security is Your Responsibility** +> While Redwood offers the tools, practices, and information to keep your application secure, it remains your responsibility to put these in place. Proper password, token, and key protection using disciplined communication, password management systems, and environment management services like [Doppler](https://www.doppler.com) are strongly encouraged. + +> **Security Policy and Contact Information** +> The RedwoodJS Security Policy is located [in the codebase repository on GitHub](https://github.com/redwoodjs/redwood/security/policy). +> +> To report a potential security vulnerability, contact us at [security@redwoodjs.com](mailto:security@redwoodjs.com). + +## Authentication + +`@redwoodjs/auth` is a lightweight wrapper around popular SPA authentication libraries. We currently support [the following authentication providers](authentication.md) as well as a self-hosted solution ([dbAuth](auth/dbauth.md)): + +* Netlify Identity Widget +* Auth0 +* Azure Active Directory +* Netlify GoTrue-JS +* Magic Links - Magic.js +* Firebase's GoogleAuthProvider +* Ethereum +* Supabase +* Nhost + +For example implementations, please see [Authentication](https://github.com/redwoodjs/redwood/tree/main/packages/auth) and the use of the `getCurrentUser` and `requireAuth` helpers. + +For a demonstration, check out the [Auth Playground](https://redwood-playground-auth.netlify.app). + +## GraphQL + +GraphQL is a fundamental part of Redwood. For details on how Redwood uses GraphQL and handles important security considerations, please see the [GraphQL Security](graphql.md#security) section and the [Secure Services](services.md#secure-services) section. + +### Malicious Document Requests + +The RedwoodJS GraphQL handler sets [reasonable defaults](graphql.md#security) to prevent abusive queries that attackers often use to exploit systems. + +### Disable Introspection and Playground + +Because both introspection and the playground share possibly sensitive information about your data model, your data, and your queries and mutations, best practices for deploying a GraphQL server call to [disable these in production](graphql.md#introspection-and-playground-disabled-in-production). By default, RedwoodJS **only enables introspection and the playground when running in development**. + +:::note + +For more information on how to enable introspection in production, please see the [GraphQL Docs](graphql.md#introspection-and-playground-disabled-in-production). +::: + +## Functions + +When deployed, a [serverless function](serverless-functions.md) is an open API endpoint. That means anyone can access it and perform any tasks it's asked to do. In many cases, this is completely appropriate and desired behavior. But there are often times you need to restrict access to a function, and Redwood can help you do that using a [variety of methods and approaches](serverless-functions.md#security-considerations).
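+ +As one minimal sketch (not Redwood-specific machinery; the header and environment variable names here are made up for illustration), a function can simply refuse to run unless the caller presents a shared secret: + +```js title="api/src/functions/report.js" +export const handler = async (event, _context) => { +  // Reject any request that doesn't carry the shared secret stored in an env var. +  if (event.headers['x-report-secret'] !== process.env.REPORT_FUNCTION_SECRET) { +    return { statusCode: 401 } +  } + +  // ...do the protected work here... +  return { +    statusCode: 200, +    headers: { 'Content-Type': 'application/json' }, +    body: JSON.stringify({ ok: true }), +  } +} +``` + +More robust options, like requiring authentication or verifying webhook signatures, are covered in the links below.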
+ +For details on how to keep your functions secure, please see the [Serverless functions & Security considerations](serverless-functions.md#security-considerations) section in the RedwoodJS documentation. + +## Webhooks + +[Webhooks](webhooks.md) are a common way that third-party services notify your RedwoodJS application when an event of interest happens. + +They are a form of messaging or automation and allow web applications to communicate with each other and send real-time data from one application to another whenever a given event occurs. + +Since each of these webhooks will call a function endpoint in your RedwoodJS API, you need to ensure that these run **only when they should**. That means you need to: + +* Verify it comes from the place you expect +* Trust the party +* Know the payload sent in the hook hasn't been tampered with +* Ensure that the hook isn't reprocessed or replayed + +For details on how to keep your incoming webhooks secure and how to sign your outgoing webhooks, please see [Webhooks](webhooks.md). diff --git a/docs/versioned_docs/version-7.0/seo-head.md b/docs/versioned_docs/version-7.0/seo-head.md new file mode 100644 index 000000000000..7cb5772edd62 --- /dev/null +++ b/docs/versioned_docs/version-7.0/seo-head.md @@ -0,0 +1,356 @@ +--- +description: Use meta tags to set page info for SEO +--- + +# SEO & `<head>` tags + +Search Engine Optimization is a dark art that some folks dedicate their entire lives to. We've added a couple of features to Redwood to make HTML-based SEO fairly simple. + +## Adding a Title + +You certainly want to change the title of your Redwood app from the default of "Redwood App." You can start by adding or modifying `title` inside of `/redwood.toml`: + +```diff title=redwood.toml +[web] +- title = "Redwood App" ++ title = "My Cool App" + port = 8910 + apiUrl = "/.redwood/functions" +``` + +This title (the app title) is used by default for all your pages if you don't define another one. +It will also be used for the title template. + +### Title Template + +Now that you have the app title set, you probably want some consistency with the page title. That's what the title template is for. + +Add `titleTemplate` as a prop for `RedwoodProvider` to have a title template for every page: + +```diff title=web/src/App.(tsx|jsx) +- ++ + /* ... */ + +``` + +You can use whatever formatting you'd like in here. Some examples: + +```jsx +"%PageTitle | %AppTitle" => "Home Page | Redwood App" + +"%AppTitle · %PageTitle" => "Redwood App · Home Page" + +"%PageTitle : %AppTitle" => "Home Page : Redwood App" +``` + +## Adding to Page `<head>` + +So you want to change the title of your page, or add elements to the `<head>` of the page? We've got you! + +Let's say you want to change the title of your About page. Redwood provides a built-in `<Head>` component, which you can use like this: + +```diff title=web/src/pages/AboutPage/AboutPage.(tsx|jsx) ++import { Head } from '@redwoodjs/web' + +const AboutPage = () => { + return ( +
+    <div>
+      <h2>AboutPage</h2>
    ++ ++ About the team ++ +``` + +You can include any valid `` tag in here that you like. However, Redwood also provides a utility component [<Metadata>](#setting-meta-tags-and-opengraph-directives-with-metadata). + +:::caution `` Deprecation + +Prior to Redwood 6.6.0 this component was called `` and had several special hard-coded props like `ogContentUrl`, which didn't properly map to the OpenGraph spec. We'll still render `` for the foreseeable future, but it's deprecated and you should migrate to `` if you have an existing app. + +::: + +### What About Nested Tags? + +Redwood uses [react-helmet-async](https://github.com/staylor/react-helmet-async) underneath, which will use the tags furthest down your component tree. + +For example, if you set title in your Layout, and a title in your Page, it'll render the one in Page - this way you can override the tags you wish, while sharing the tags defined in Layout. + +:::info Bots & `` Tags + +For these headers to appear to bots and scrapers e.g. for twitter to show your title, you have to make sure your page is prerendered. If your content is static you can use Redwood's built in [Prerender](prerender.md). For dynamic tags, check the [Dynamic head tags](#dynamic-tags) + +::: + +## Setting `` Tags and OpenGraph Directives with `` + +Often we want to set more than just the title and description of the page—most commonly [OpenGraph](https://ogp.me/) headers. + +Redwood provides a convenience component `` to help you create most of these `` tags for you with a more concise syntax. But, you can also pass children and define any custom content that you want. + +Here's an example setting some common meta, including a page title, description, `og:image` and an `http-equiv`: + +```jsx +import { Metadata } from '@redwoodjs/web' + +const AboutPage = () => { + return ( +
    + + + + +

    About Page

    +

    This is the about page!

    +
    + ) +} + +export default AboutPage +``` + +This code would be transformed into this HTML and injected into the `` tag: + +```html +About page + + + + + + + + + +``` + +Setting an `og:image` is how sites like Facebook and Slack can show a preview of a URL when pasted into a post (also known as "unfurling"): + +![Typical URL unfurl](/img/facebook_unfurl.png) + +Sites like GitHub go a step farther than a generic image by actually creating an image for a repo on the fly, including details about the repo itself: + +![GitHub's og:image for the redwood repo](https://opengraph.githubassets.com/322ce8081bb85a86397a59494eab1c0fbe942b5104461f625e2c973c46ae4179/redwoodjs/redwood) + +If you want to write your own `` tags, skipping the interpolation that `` does for you, you can pass them as children to `` or just write them into the `` tag as normal. + +### `` Props + +For the most part `` creates simple `` tags based on the structure of the props you pass in. There are a couple of special behaviors described below. + +#### Plain Key/Value Props + +Any "plain" key/value prop will be turned into a `` tag with `name` and `content` attributes: + +```jsx + +// generates + +``` + +Child elements are just copied 1:1 to the resulting output: + +```jsx + + + +// generates + + +``` + +#### Passing Objects to Props + +Any props that contain an object will create a `` tag with `property` and `content` attributes, and the `property` being the names of the nested keys with a `:` between each: + +```jsx + +// generates + +``` + +This is most commonly used to create the "nested" structure that a spec like OpenGraph uses: + +```jsx + +// generates + +``` + +You can create multiple `` tags with the same name/property (allowed by the OpenGraph spec) by using an array: + +```jsx + +// generates + + +``` + +You can combine nested objects with strings to create any structure you like: + +```jsx + +// generates + + + + + + + +``` + +#### Special OpenGraph Helpers + +If you define _any_ `og` prop, we will copy any `title` and `description` to an `og:title` and `og:description`: + +```jsx + +// generates + + +``` + +You can override this behavior by explicitly setting `og:title` or `og:description` to `null`: + +```jsx + +// generates + +``` + +Of course, if you don't want any auto-generated `og` tags, then don't include any `og` prop at all! + +In addition to `og:title` and `og:description`, if you define _any_ `og` prop we will generate an `og:type` set to `website`: + +```jsx + +// generates + +``` + +You can override the `og:type` by setting it directly: + +```jsx + +// generates + +``` + +#### Other Special Cases + +If you define a `title` prop we will automatically prepend a `` tag to the output: + +```jsx +<Metadata title="My Website" /> +// generates +<title>My Website + +``` + +If you define a `charSet` prop we will create a `` tag with the `charset` attribute: + +```jsx + +// generates + +``` + +We simplified some of the examples above by excluding the generated `` and `og:type` tags, so here's the real output if you included `title` and `og` props: + +```jsx +<Metadata title="My Website" og /> +// generates +<title>My Website + + + + +``` + +:::info Do I need to apply these same tags over and over in every page? + +Some `` tags, like `charset` or `locale` are probably applicable to the entire site, in which case it would be simpler to just include these once in your `index.html` instead of having to set them manually on each and every page/cell of your site. 
+ +::: + +This should allow you to create a fairly full-featured set of `` tags with minimal special syntax! A typical `` invocation could look like: + +```jsx + +``` + +## Dynamic tags + +Bots will pick up our tags if we've prerendered the page, but what if we want to set the `` based on the output of the Cell? + +:::info Prerendering + +As of v3.x, Redwood supports prerendering your [Cells](https://redwoodjs.com/docs/cells) with the data you were querying. For more information please refer [to this section](https://redwoodjs.com/docs/prerender#cell-prerendering). + +::: + +Let's say in our `PostCell`, we want to set the title to match the `Post`. + +```jsx +import { Metadata } from '@redwoodjs/web' + +import Post from 'src/components/Post/Post' + +export const QUERY = gql` + query FindPostById($id: Int!) { + post: post(id: $id) { + title + snippet + author { + name + } + } + } +` + +export const Loading = /* ... */ + +export const Empty = /* ... */ + +export const Success = ({ post }) => { + return ( + <> + + + + ) +} +``` + +Once the `Success` component renders, it will update your page's `` and set the relevant `<meta>` tags for you! diff --git a/docs/versioned_docs/version-7.0/serverless-functions.md b/docs/versioned_docs/version-7.0/serverless-functions.md new file mode 100644 index 000000000000..c0c53fc8a846 --- /dev/null +++ b/docs/versioned_docs/version-7.0/serverless-functions.md @@ -0,0 +1,895 @@ +--- +description: Create, develop, and run serverless functions +--- + +# Serverless Functions (API Endpoints) + +<!-- `redwood.toml`—`api/src/functions` by default. --> + + +:::info + +You can think of serverless functions as API Endpoints, and in the future we'll update the terminology used. + +Originally, Redwood apps were intended to be deployed as serverless functions to AWS Lambda. Whenever a Redwood app is deployed to a "serverful" environment such as Fly or Render, a Fastify server is started and your Redwood app's functions in `api/src/functions` are automatically registered onto the server. Request adapters are also automatically configured to handle the translation between Fastify's request and reply objects to the functions' AWS Lambda signature. + +::: + +Redwood looks for serverless functions in `api/src/functions`. Each function is mapped to a URI based on its filename. For example, you can find `api/src/functions/graphql.js` at `http://localhost:8911/graphql`. + +## Creating Serverless Functions + +Creating serverless functions is easy with Redwood's function generator: + +```bash +yarn rw g function <name> +``` + +This will generate a stub serverless function in the folder `api/src/functions/<name>`, along with a test and an empty scenarios file. + +_Example of a bare minimum handler you need to get going:_ + +```jsx +export const handler = async (event, context) => { + return { + statusCode: 200, + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + data: '${name} function', + }), + } +} +``` + +## The handler + +For a lambda function to be a lambda function, it must export a handler that returns a status code. The handler receives two arguments: `event` and `context`. Whatever it returns is the `response`, which should include a `statusCode` at the very least. 
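+
+For instance, a handler can pull details off the `event` (headers, query-string parameters, the body) and return whatever shape it likes, as long as it includes a `statusCode`. A minimal sketch (the `name` query-string parameter is just an illustration):
+
+```ts
+import type { APIGatewayEvent, Context } from 'aws-lambda'
+
+export const handler = async (event: APIGatewayEvent, _context: Context) => {
+  // Query-string parameters arrive as strings, or undefined when not provided
+  const name = event.queryStringParameters?.name ?? 'world'
+
+  return {
+    statusCode: 200,
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ greeting: `Hello, ${name}` }),
+  }
+}
+```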
+ +> **File/Folder Structure** +> +> For example, with a target function endpoint name of /hello, you could save the function file in one of the following ways: +> +> - `./api/src/functions/hello.{js,ts}` +> - `./api/src/functions/hello/hello.{js,ts}` +> - `./api/src/functions/hello/index.{js,ts}` +> +> Other files in the folder will _not_ be exposed as an endpoint + +### Re-using/Sharing code + +You can use code in `api/src` in your serverless function, some examples: + +```jsx +// importing `db` directly +import { db } from 'src/lib/db' + +// importing services +import { update } from 'src/services/subscriptions' + +// importing a custom shared library +import { reportError } from 'src/lib/errorHandling' +``` + +If you just want to move some logic into another file, that's totally fine too! + +```bash +api/src +├── functions +│   ├── graphql.ts +│   └── helloWorld +│   ├── helloWorld.scenarios.ts +│   ├── helloWorld.test.ts +│   └── helloWorld.ts # <-- imports hellWorldLib +│   └── helloWorldLib.ts # <-- exports can be used in the helloWorld +``` + +## Developing locally + +When you run `yarn rw dev` - it'll watch for changes and make your functions available at: + +- `localhost:8911/{functionName}` and +- `localhost:8910/.redwood/functions/{functionName}` (used by the web side). + +Note that the `.redwood/functions` path is determined by your setting in your [redwood.toml](app-configuration-redwood-toml.md#web) - and is used both in development and in the deployed Redwood app + +## Testing + +You can write tests and scenarios for your serverless functions very much like you would for services, but it's important to properly mock the information that the function `handler` needs. + +To help you mock the `event` and `context` information, we've provided several api testing fixture utilities: + +| Mock | Usage | +| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `mockHttpEvent` | Use this to mock out the http request `event` that is received by your function in unit tests. Here you can set `headers`, `httpMethod`, `queryStringParameters` as well as the `body` and if the body `isBase64Encoded`. The `event` contains information from the invoker as JSON-formatted string whose structure will vary. See [Working with AWS Lambda proxy integrations for HTTP APIs](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html) for the payload format. | +| `mockContext` | Use this function to mock the http `context`. Your function handler receives a context object with properties that provide information about the invocation, function, and execution environment. See [AWS Lambda context object in Node.js](https://docs.aws.amazon.com/lambda/latest/dg/nodejs-context.html) for what context properties you can mock. | +| `mockSignedWebhook` | Use this function to mock a signed webhook. This is a specialized `mockHttpEvent` mock that also signs the payload and adds a signature header needed to verify that the webhook is trustworthy. 
See [How to Receive and Verify an Incoming Webhook](webhooks.md#how-to-receive-and-verify-an-incoming-webhook) to learn more about signing and verifying webhooks. | + +### How to Test Serverless Functions + +Let's learn how to test a serverless function by first creating a simple function that divides two numbers. + +As with all serverless lambda functions, the handler accepts an `APIGatewayEvent` which contains information from the invoker. +That means it will have the HTTP headers, the querystring parameters, the method (GET, POST, PUT, etc), cookies, and the body of the request. +See [Working with AWS Lambda proxy integrations for HTTP APIs](https://docs.aws.amazon.com/apigateway/latest/developerguide/http-api-develop-integrations-lambda.html) for the payload format. + +Let's generate our function: + +```bash +yarn rw generate function divide +``` + +We'll use the querystring to pass the `dividend` and `divisor` to the function handler on the event as seen here to divide 10 by 2. + +```bash +// request +http://localhost:8911/divide?dividend=10&divisor=2 +``` + +If the function can successfully divide the two numbers, the function returns a body payload back in the response with a [HTTP 200 Success](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/200) status: + +```bash +// response +{"message":"10 / 2 = 5","dividend":"10","divisor":"2","quotient":5} +``` + +And, we'll have some error handling to consider the case when either the dividend or divisor is missing and return a [HTTP 400 Bad Request](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400) status code; or, if we try to divide by zero or something else goes wrong, we return a [500 Internal Server Error](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500). + +```tsx title="api/src/functions/divide/divide.ts" +import type { APIGatewayEvent } from 'aws-lambda' + +export const handler = async (event: APIGatewayEvent) => { + // sets the default response + let statusCode = 200 + let message = '' + + try { + // get the two numbers to divide from the event query string + const { dividend, divisor } = event.queryStringParameters + + // make sure the values to divide are provided + if (dividend === undefined || divisor === undefined) { + statusCode = 400 + message = `Please specify both a dividend and divisor.` + throw Error(message) + } + + // divide the two numbers + const quotient = parseInt(dividend) / parseInt(divisor) + message = `${dividend} / ${divisor} = ${quotient}` + + // check if the numbers could be divided + if (!isFinite(quotient)) { + statusCode = 500 + message = `Sorry. Could not divide ${dividend} by ${divisor}` + throw Error(message) + } + + return { + statusCode, + body: { + message, + dividend, + divisor, + quotient, + }, + } + } catch (error) { + return { + statusCode, + body: { + message: error.message, + }, + } + } +} +``` + +Sure, you could launch a browser or use Curl or some other manual approach and try out various combinations to test the success and error cases, but we want to automate the tests as part of our app's CI. + +That means we need to write some tests. + +#### Function Unit Tests + +To test a serverless function, you'll work with the test script associated with the function. 
You'll find it in the same directory as your function: + +```bash +api +├── src +│ ├── functions +│ │ ├── divide +│ │ │ ├── divide.ts +│ │ │ ├── divide.test.ts +``` + +The setup steps are to: + +- write your test cases by mocking the event using `mockHttpEvent` to contain the information you want to give the handler +- invoke the handler with the mocked event +- extract the result body +- test that the values match what you expect + +The boilerplate steps are generated automatically for you by the function generator +Let's look at a series of tests that mock the event with different information in each. + +First, let's write a test that divides 20 by 5 and we'll expect to get 4 as the quotient: + +```jsx title="api/src/functions/divideBy/divide.test.ts" +import { mockHttpEvent } from '@redwoodjs/testing/api' +import { handler } from './divide' + +describe('divide serverless function', () => { + it('divides two numbers successfully', async () => { + const httpEvent = mockHttpEvent({ + queryStringParameters: { + dividend: '20', + divisor: '5', + }, + }) + + const result = await handler(httpEvent) + const body = result.body + + expect(result.statusCode).toBe(200) + expect(body.message).toContain('=') + expect(body.quotient).toEqual(4) + }) +``` + +Then we can also add a test to handle the error when we don't provide a dividend: + +```jsx title="api/src/functions/divideBy/divide.test.ts" +it('requires a dividend', async () => { + const httpEvent = mockHttpEvent({ + queryStringParameters: { + divisor: '5', + }, + }) + + const result = await handler(httpEvent) + const body = result.body + expect(result.statusCode).toBe(400) + expect(body.message).toContain('Please specify both') + expect(body.quotient).toBeUndefined +}) +``` + +And finally, we can also add a test to handle the error when we try to divide by 0: + +```jsx + it('cannot divide by 0', async () => { + const httpEvent = mockHttpEvent({ + queryStringParameters: { + dividend: '20', + divisor: '0', + }, + }) + + const result = await handler(httpEvent) + const body = result.body + + expect(result.statusCode).toBe(500) + expect(body.message).toContain('Could not divide') + expect(body.quotient).toBeUndefined + }) +}) + +``` + +The `divide` function is a simple example, but you can use the `mockHttpEvent` to set any event values you handler needs to test more complex functions. + +You can also `mockContext` and pass the mocked `context` to the handler and even create scenario data if your function interacts with your database. For an example of using scenarios when test functions, please look at a specialized serverless function: the [webhook below](#how-to-test-webhooks). + +#### Running Function Tests + +To run an individual serverless function test: + +```bash +yarn rw test api divide +``` + +When the test run completes (and succeeds), you see the results: + +```bash + PASS api api/src/functions/divide/divide.test.ts (12.69 s) + divide serverless function + ✓ divides two numbers successfully (153 ms) + ✓ requires a dividend (48 ms) + ✓ requires a divisor (45 ms) + ✓ cannot divide by 0 (47 ms) + +Test Suites: 1 passed, 1 total +Tests: 4 passed, 4 total +Snapshots: 0 total +Time: 13.155 s +Ran all test suites matching /divide.test.ts|divide.test.ts|false/i. +``` + +If the test fails, you can update your function or test script and the test will automatically re-run. 
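+
+You can combine `mockHttpEvent` with `mockContext` in the same test. The sketch below is only an illustration: the `myFunction` handler is hypothetical, and it assumes `mockContext` accepts the context properties you want to fake, as the table above describes:
+
+```ts
+import { mockHttpEvent, mockContext } from '@redwoodjs/testing/api'
+
+// Hypothetical handler that reads both `event` and `context`
+import { handler } from './myFunction'
+
+it('passes a mocked context to the handler', async () => {
+  const httpEvent = mockHttpEvent({
+    queryStringParameters: { name: 'Alice' },
+  })
+  // Only fake the context properties your handler actually reads
+  const context = mockContext({ functionName: 'myFunction' })
+
+  const result = await handler(httpEvent, context)
+
+  expect(result.statusCode).toBe(200)
+})
+```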
+ +### Using Test Fixtures + +Often times your serverless function will have a variety of test cases, but because it may not interact with the database, you don't want to use scenarios (since that creates records in your test database). But, you still want a way to define these cases in a more declarative way for readability and maintainability -- and you can using fixtures. + +First, let's create a fixture for the `divide` function alongside your function and test as `divide.fixtures.ts`: + +```bash +api +├── src +│ ├── functions +│ │ ├── divide +│ │ │ ├── divide.ts +│ │ │ ├── divide.test.ts +│ │ │ ├── divide.fixtures.ts // <-- your fixture +``` + +Let's define a fixture for a new test case: when the function is invoked, but it is missing a divisor: + +```jsx title="api/src/functions/divide/divide.fixtures.ts" +import { mockHttpEvent } from '@redwoodjs/testing/api' + +export const missingDivisor = () => + mockHttpEvent({ + queryStringParameters: { + dividend: '20', + }, + }) +``` + +The `missingDivisor()` fixture constructs and mocks the event for the test case -- that is, we don't provide a divisor value in the `queryStringParameters` in the mocked http event. + +Now, let's use this fixture in a test by providing the handler with the event we mocked in the fixture: + +```jsx title="api/src/functions/divide/divide.test.ts" +import { missingDivisor } from './divide.fixtures' + +describe('divide serverless function', () => { + // ... other test cases + + it('requires a divisor', async () => { + const result = await handler(missingDivisor()) + + const body = result.body + + expect(result.statusCode).toBe(400) + expect(body.message).toContain('Please specify both') + expect(body.quotient).toBeUndefined + }) + + // ... +}) +``` + +Now, if we decide to change the test case date, we simply modify the fixture and re-run our tests. + +You can then define multiple fixtures to define all the cases in a central place, export each, and then use in your tests for more maintainable and readable tests. + +### How to Test Webhooks + +[Webhooks](webhooks.md) are specialized serverless functions that will verify a signature header to ensure you can trust the incoming request and use the payload with confidence. + +:::note + +Want to learn more about webhooks? See a [Detailed discussion of webhooks](webhooks.md) to find out how webhooks can give your app the power to create complex workflows, build one-to-one automation, and sync data between apps. + +::: + +In the following example, we'll have the webhook interact with our app's database, so we can see how we can use **scenario testing** to create data that the handler can access and modify. + +:::tip **Why testing webhooks is hard** + +Because your webhook is typically sent from a third-party's system, manually testing webhooks can be difficult. For one thing, you often have to create some kind of event in their system that will trigger the event -- and you'll often have to do that in a production environment with real data. Second, for each case you'll have to find data that represents each case and issue a hook for each -- which can take a lot of time and is tedious. + +Also, you'll be using production secrets to sign the payload. And finally, since your third-party needs to send you the incoming webhook you'll most likely have to launch a local tunnel to expose your development machine publicly in order to receive them. + +Instead, we can automate and mock the webhook to contain a signed payload that we can use to test the handler. 
+ +By writing these tests, you can iterate and implement the webhook logic much faster and easier without having to rely on a third party to send you data, or setting up tunneling, or triggering events on the external system. + +::: + +For our webhook test example, we'll create a webhook that updates a Order's Status by looking up the order by its Tracking Number and then updating the status to by Delivered (if our rules allow it). + +Because we'll be interacting with data, our app has an `Order` model defined in the Prisma schema that has a unique `trackingNumber` and `status`: + +```jsx title="/api/db/schema.prisma" +model Order { + id Int @id @default(autoincrement()) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + trackingNumber String @unique + status String @default("UNKNOWN") + + @@unique([trackingNumber, status]) +} +``` + +Let's generate our webhook function: + +```bash +yarn rw generate function updateOrderStatus +``` + +```bash +api +├── src +│ ├── functions +│ │ ├── updateOrderStatus +│ │ │ ├── updateOrderStatus.ts +│ │ │ ├── updateOrderStatus.scenarios.ts +│ │ │ ├── updateOrderStatus.test.ts + +``` + +The `updateOrderStatus` webhook will expect: + +- a signature header named `X-Webhook-Signature` +- that the signature in that header will signed using the [SHA256 method](webhooks.md#sha256-verifier-used-by-github-discourse) +- verify the signature and throw an [401 Unauthorized](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401) error if the event cannot be trusted (that is, it failed signature verification) +- if verified, then proceed to +- find the order by the tracking number provided +- check that the order's current status allows the status to be changed +- and if so, update the error and return the order and message +- or if not, return a [500 internal server error](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500) with a message that the order couldn't be updated + +```tsx +import type { APIGatewayEvent } from 'aws-lambda' +import { verifyEvent, VerifyOptions, WebhookVerificationError } from '@redwoodjs/api/webhooks' +import { db } from 'src/lib/db' + +export const handler = async (event: APIGatewayEvent) => { + let currentOrderStatus = 'UNKNOWN' + + try { + const options = { + signatureHeader: 'X-Webhook-Signature', + } as VerifyOptions + + verifyEvent('sha256Verifier', { + event, + secret: 'MY-VOICE-IS-MY-PASSPORT-VERIFY-ME', + options, + }) + + // Safely use the validated webhook payload body + const body = JSON.parse(event.body) + const trackingNumber = body.trackingNumber + const status = body.status + + // You can only update the status if the order's current status allows + switch (status) { + case 'PLACED': + currentOrderStatus = 'UNKNOWN' + break + case 'SHIPPED': + currentOrderStatus = 'PLACED' + break + case 'DELIVERED': + currentOrderStatus = 'SHIPPED' + break + default: + currentOrderStatus = 'UNKNOWN' + } + + // updated the order with the new status + // using the trackingNumber provided + const order = await db.order.update({ + where: { trackingNumber_status: { trackingNumber, status: currentOrderStatus } }, + data: { status: status }, + }) + + return { + statusCode: 200, // Success!!! 
+ body: JSON.stringify({ + order, + message: `Updated order ${order.id} to ${order.status} at ${order.updatedAt}`, + }), + } + } catch (error) { + if (error instanceof WebhookVerificationError) { + return { + statusCode: 401, // Unauthorized + } + } else { + return { + statusCode: 500, // An error + body: JSON.stringify({ + error: error.message, + message: `Unable to update the order status`, + }), + } + } + } +} +``` + +#### Webhook Test Scenarios + +Since our `updateOrderStatus` webhook will query an order by its tracking number and then attempt to update its status, we'll want to seed our test run with some scenario data that helps us have records we can use to test that the webhook does what we expect it to in each situation. + +Let's create three orders for with different status: `PLACED`, `SHIPPED`, and `DELIVERED`. + +We'll use these to test that you cannot update an order to the delivered status unless it is currently "shipped:. + +We can refer to these individual orders in our tests as `scenario.order.placed`, `scenario.order.shipped` , or `scenario.order.delivered`. + +```tsx title="api/src/functions/updateOrderStatus/updateOrderStatus.scenarios.ts" +export const standard = defineScenario({ + order: { + placed: { + data: { trackingNumber: '1ZP1LC3D0Rd3R000001', status: 'PLACED' }, + }, + shipped: { + data: { trackingNumber: '1ZSH1PP3D000002', status: 'SHIPPED' }, + }, + delivered: { + data: { trackingNumber: '1ZD31IV3R3D000003', status: 'DELIVERED' }, + }, + }, +}) +``` + +#### Webhook Unit Tests + +The webhook test setup needs to: + +- import your api testing utilities, such as `mockSignedWebhook` +- import your function handler + +In each test scenario we will: + +- get the scenario order data +- create a webhook payload with a tracking number and a status what we want to change its order to +- mock and sign the webhook using `mockSignedWebhook` that specifies the verifier method, signature header, and the secret that will verify that signature +- invoke the handler with the mocked signed event +- extract the result body (and parse it since it will be JSON data) +- test that the values match what you expect + +In our first scenario, we'll use the shipped order to test that we can update the order given a valid tracking number and change its status to delivered: + +```tsx title="api/src/functions/updateOrderStatus/updateOrderStatus.test.ts" +import { mockSignedWebhook } from '@redwoodjs/testing/api' +import { handler } from './updateOrderStatus' + +describe('updates an order via a webhook', () => { + scenario('with a shipped order, updates the status to DELIVERED', + async (scenario) => { + + const order = scenario.order.shipped + + const payload = { trackingNumber: order.trackingNumber, + status: 'DELIVERED' } + + const event = mockSignedWebhook({ payload, + signatureType: 'sha256Verifier', + signatureHeader: 'X-Webhook-Signature', + secret: 'MY-VOICE-IS-MY-PASSPORT-VERIFY-ME' }) + + const result = await handler(event) + + const body = JSON.parse(result.body) + + expect(result.statusCode).toBe(200) + expect(body.message).toContain(`Updated order ${order.id}`) + expect(body.message).toContain(`to ${payload.status}`) + expect(body.order.id).toEqual(order.id) + expect(body.order.status).toEqual(payload.status) + }) +``` + +But, we also want to test what happens if the webhook receives an invalid signature header like `X-Webhook-Signature-Invalid`. 
+ +Because the header isn't what the webhook expects (it wants to see a header named `X-Webhook-Signature`), this request is not verified and will return a 401 Unauthorized and not try to update the order at all. + +:::note + +For brevity we didn't test that the order's status wasn't changed, but that could be checked as well + +::: + +```jsx +scenario('with an invalid signature header, the webhook is unauthorized', async (scenario) => { + const order = scenario.order.placed + + const payload = { trackingNumber: order.trackingNumber, status: 'DELIVERED' } + const event = mockSignedWebhook({ + payload, + signatureType: 'sha256Verifier', + signatureHeader: 'X-Webhook-Signature-Invalid', + secret: 'MY-VOICE-IS-MY-PASSPORT-VERIFY-ME', + }) + + const result = await handler(event) + + expect(result.statusCode).toBe(401) +}) +``` + +Next, we test what happens if the event payload is signed, but with a different secret than it expects; that is it was signed using the wrong secret (`MY-NAME-IS-WERNER-BRANDES-VERIFY-ME` and not `MY-VOICE-IS-MY-PASSPORT-VERIFY-ME`). + +Again, we expect as 401 Unauthorized response. + +```jsx +scenario('with the wrong webhook secret the webhook is unauthorized', async (scenario) => { + const order = scenario.order.placed + + const payload = { trackingNumber: order.trackingNumber, status: 'DELIVERED' } + const event = mockSignedWebhook({ + payload, + signatureType: 'sha256Verifier', + signatureHeader: 'X-Webhook-Signature', + secret: 'MY-NAME-IS-WERNER-BRANDES-VERIFY-ME', + }) + + const result = await handler(event) + + expect(result.statusCode).toBe(401) +}) +``` + +Next, what happens if the order cannot be found? We'll try a tracking number that doesn't exist (that is we did not create it in our scenario order data): + +```jsx +scenario('when the tracking number cannot be found, returns an error', async (scenario) => { + const order = scenario.order.placed + + const payload = { trackingNumber: '1Z-DOES-NOT-EXIST', status: 'DELIVERED' } + const event = mockSignedWebhook({ + payload, + signatureType: 'sha256Verifier', + signatureHeader: 'X-Webhook-Signature', + secret: 'MY-VOICE-IS-MY-PASSPORT-VERIFY-ME', + }) + + const result = await handler(event) + + const body = JSON.parse(result.body) + + expect(result.statusCode).toBe(500) + expect(body).toHaveProperty('error') +}) +``` + +Last, we want to test a business rule that says you cannot update an order to be delivered if it already is delivered + +Therefore our scenario uses the `scenario.order.delivered` data where the order has a placed status. 
+ +:::tip + +You'll have additional tests here to check that if the order is placed you cannot update it to be delivered and if the order is shipped you cannot update to be placed, etc + +::: + +```jsx + scenario('when the order has already been delivered, returns an error', + async (scenario) => { + const order = scenario.order.delivered + + const payload = { trackingNumber: order.trackingNumber, + status: 'DELIVERED'} + const event = mockSignedWebhook({payload, + signatureType: 'sha256Verifier', + signatureHeader: 'X-Webhook-Signature', + secret: 'MY-VOICE-IS-MY-PASSPORT-VERIFY-ME' }) + + const result = await handler(event) + + const body = JSON.parse(result.body) + + expect(result.statusCode).toBe(500) + expect(body).toHaveProperty('error') + expect(body.message).toEqual('Unable to update the order status') + }) +}) +``` + +As with other serverless function testing, you can also `mockContext` and pass the mocked context to the handler if your webhook requires that information. + +#### Running Webhook Tests + +To run an individual webhook test: + +```bash +yarn rw test api updateOrderStatus +``` + +When the test run completes (and succeeds), you see the results: + +```bash + PASS api api/src/functions/updateOrderStatus/updateOrderStatus.test.ts (10.3 s) + updates an order via a webhook + ✓ with a shipped order, updates the status to DELIVERED (549 ms) + ✓ with an invalid signature header, the webhook is unauthorized (51 ms) + ✓ with the wrong webhook secret the webhook is unauthorized (44 ms) + ✓ when the tracking number cannot be found, returns an error (54 ms) + ✓ when the order has not yet shipped, returns an error (57 ms) + ✓ when the order has already been delivered, returns an error (73 ms) + +Test Suites: 1 passed, 1 total +Tests: 6 passed, 6 total +Snapshots: 0 total +Time: 10.694 s, estimated 36 s +Ran all test suites matching /updateOrderStatus.test.ts|updateOrderStatus.test.ts|false/i. +``` + +If the test fails, you can update your function or test script and the test will automatically re-run. + +## Security considerations + +When deployed, **a custom serverless function is an open API endpoint and is your responsibility to secure appropriately**. 🔐 + +That means _anyone_ can access your function and perform any tasks it's asked to do. In many cases, this is completely appropriate and desired behavior. + +But, in some cases, for example when the function interacts with third parties, like sending email, or when it retrieves sensitive information from a database, you may want to ensure that only verified requests from trusted sources can invoke your function. + +And, in some other cases, you may even want to limit how often the function is called over a set period of time to avoid denial-of-service-type attacks. + +### Webhooks + +If your function receives an incoming Webhook from a third party, see [Webhooks](webhooks.md) in the RedwoodJS documentation to verify and trust its payload. + +### Serverless Functions with Redwood User Authentication + +Serverless functions can use the same user-authentication strategy used by GraphQL Directives to [secure your services](graphql.md#secure-services) via the `useRequireAuth` wrapper. + +:::tip + + If you need to protect an endpoint via authentication that isn't user-based, you should consider using [Webhooks](webhooks.md) with a signed payload and verifier. 
+ +::: + +#### How to Secure a Function with Redwood Auth + +The `useRequireAuth` wrapper configures your handler's `context` so that you can use any of the `requireAuth`-related authentication helpers in your serverless function: + +- import `useRequireAuth` from `@redwoodjs/graphql-server` +- import your app's custom `getCurrentUser` and the `isAuthenticated` check from `src/lib/auth` +- import your auth provider's `authDecoder` +- implement your serverless function as you would, but do not `export` it (see `myHandler` below). +- pass your implementation, `getCurrentUser` and `authDecoder` to the `useRequireAuth` wrapper and export its return +- check if the user `isAuthenticated()` and, if not, handle the unauthenticated case by returning a `401` status code (for example) + +```tsx +import type { APIGatewayEvent, Context } from 'aws-lambda' + +// highlight-next-line +import { authDecoder } from '@redwoodjs/auth-dbauth-api' +// highlight-next-line +import { useRequireAuth } from '@redwoodjs/graphql-server' + +// highlight-next-line +import { getCurrentUser, isAuthenticated } from 'src/lib/auth' +import { logger } from 'src/lib/logger' + +const myHandler = async (event: APIGatewayEvent, context: Context) => { + logger.info('Invoked myHandler') + + // highlight-next-line + if (isAuthenticated()) { + logger.info('Access myHandler as authenticated user') + + return { + statusCode: 200, + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + data: 'myHandler function', + }), + } + // highlight-start + } else { + logger.error('Access to myHandler was denied') + + return { + statusCode: 401, + } + } + // highlight-end +} + +export const handler = useRequireAuth({ + handlerFn: myHandler, + getCurrentUser, + authDecoder, +}) +``` + +Now anywhere `context` is used, such as in services or when using `hasRole()` or `isAuthenticated()` from your `auth` lib, `currentUser` will be set and `requireAuth`-related functions will be able to verify the authentication state or if the user has the required roles. + +In short, you can now use any of your auth functions like `isAuthenticated()`, `hasRole()`, or `requireAuth()` in your serverless function. + +:::note + +If you intend to implement a feature that requires user authentication, then using GraphQL, auth directives, and services is the preferred approach. + +::: + +#### Using your Authenticated Serverless Function + +As there is no login flow when using functions, the `useRequireAuth` check assumes that your user is already authenticated and you have access to their JWT access token. + +In your request, you must include the following headers: + +- the auth provider type that your application is using, e.g. `dbAuth` +- the Bearer token (JWT access token) +- if using dbAuth, then also the dbAuth Cookie + +For example: + +```bash +Authorization: Bearer myJWT.accesstoken.signature +auth-provider: supabase +Content-Type: application/json +``` + + +### Other security considerations + +In addition to securing your serverless functions, you may consider logging, rate limiting and whitelisting as ways to protect your functions from abuse or misuse. + +#### Visibility via Logging + +Logging in production — and monitoring for suspicious activity, unknown IP addresses, errors, etc. — can be a critical part of keeping your serverless functions and your application safe. 
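+
+For example, a function can log basic request metadata so that unusual traffic is visible later. A minimal sketch using Redwood's `logger` (the handler name and the exact fields you record are up to you):
+
+```ts
+import type { APIGatewayEvent, Context } from 'aws-lambda'
+
+import { logger } from 'src/lib/logger'
+
+export const handler = async (event: APIGatewayEvent, _context: Context) => {
+  // Record where the request came from, for later inspection and alerting
+  logger.info(
+    {
+      path: event.path,
+      method: event.httpMethod,
+      sourceIp: event.requestContext?.identity?.sourceIp,
+    },
+    'Invoked myHandler'
+  )
+
+  return { statusCode: 200 }
+}
+```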
+ +Third-party log services like [logFlare](https://logflare.app/), [Datadog](https://www.datadoghq.com/) and [LogDNA](https://www.logdna.com/) all have features that store logs for inspection, but also can trigger alerts and notifications if something you deem untoward occurs. + +See [Logger](logger.md) in the RedwoodJS docs for more information about how to setup and use logging services. + +#### Rate Limiting + +Rate limiting (or throttling) how often a function executes by a particular IP addresses or user account is a common way of stemming api abuse (for example, a distributed Denial-of-Service, or DDoS, attack). + +As LogRocket [says](https://blog.logrocket.com/rate-limiting-node-js/): + +:::info + +Rate limiting is a very powerful feature for securing backend APIs from malicious attacks and for handling unwanted streams of requests from users. In general terms, it allows us to control the rate at which user requests are processed by our server. + +::: + +API Gateways like [Kong](https://docs.konghq.com/hub/kong-inc/rate-limiting/) offer plugins to configure how many HTTP requests can be made in a given period of seconds, minutes, hours, days, months, or years. + +Currently, RedwoodJS does not offer rate limiting in the framework, but your deployment target infrastructure may. This is a feature RedwoodJS will investigate for future releases. + +For more information about Rate Limiting in Node.js, consider: + +- [Understanding and implementing rate limiting in Node.js](https://blog.logrocket.com/rate-limiting-node-js/) on LogRocket + +#### IP Address Whitelisting + +Because the `event` passed to the function handler contains the request's IP address, you could decide to whitelist only certain known and trusted IP addresses. + +```jsx +const ipAddress = ({ event }) => { + return event?.headers?.['client-ip'] || event?.requestContext?.identity?.sourceIp || 'localhost' +} +``` + +If the IP address in the event does not match, then you can raise an error and return `401 Unauthorized` status. + +## Returning Binary Data + +By default, RedwoodJS functions return strings or JSON. If you need to return binary data, your function will need to encode it as Base64 and then set the `isBase64Encoded` response parameter to `true`. Note that this is best suited to relatively small responses. The entire response body will be loaded into memory as a string, and many serverless hosting environments will limit your function to eg. 10 seconds, so if your file takes longer than that to process and download it may get cut off. For larger or static files, it may be better to upload files to an object store like S3 and generate a [pre-signed URL](https://stackoverflow.com/questions/38831829/nodejs-aws-sdk-s3-generate-presigned-url) that the client can use to download the file directly. 
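+
+If you take the pre-signed URL route, the function only hands the client a short-lived URL instead of the file itself. Here's a rough sketch using the AWS SDK v3 — the bucket, key, region, and expiry are placeholders for your own values:
+
+```ts
+import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'
+import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
+import type { APIGatewayEvent, Context } from 'aws-lambda'
+
+const s3 = new S3Client({ region: 'us-east-1' })
+
+export const handler = async (_event: APIGatewayEvent, _context: Context) => {
+  // Let the client download the object directly from S3 via a time-limited URL
+  const url = await getSignedUrl(
+    s3,
+    new GetObjectCommand({ Bucket: 'my-bucket', Key: 'reports/big-report.pdf' }),
+    { expiresIn: 15 * 60 } // 15 minutes
+  )
+
+  return {
+    statusCode: 200,
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ url }),
+  }
+}
+```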
+ +Here's an example of how to return a binary file from the filesystem: + +```typescript title="api/src/functions/myCustomFunction.ts" +import type { APIGatewayEvent, Context } from 'aws-lambda' +import fs from 'fs' + +export const handler = async (event: APIGatewayEvent, context: Context) => { + const file = await fs.promises.readFile('/path/to/image.png') + + return { + statusCode: 200, + headers: { + 'Content-Type': 'image/png', + 'Content-Length': file.length, + }, + body: file.toString('base64'), + isBase64Encoded: true, + } +} +``` diff --git a/docs/versioned_docs/version-7.0/services.md b/docs/versioned_docs/version-7.0/services.md new file mode 100644 index 000000000000..52bb916f0a8a --- /dev/null +++ b/docs/versioned_docs/version-7.0/services.md @@ -0,0 +1,1136 @@ +--- +description: Put all your business logic in one place +--- + +# Services + +Redwood aims to put all your business logic in one place—Services. These can be used by your GraphQL API or any other place in your backend code. Redwood does all the annoying stuff for you, just write your business logic! + +## Overview + +What do we mean by "business logic?" [One definition](https://www.investopedia.com/terms/b/businesslogic.asp) states: "Business logic is the custom rules or algorithms that handle the exchange of information between a database and user interface." In Redwood, those custom rules and algorithms go in Services. You can't put that logic in the client because it's open to the world and could be manipulated. Imagine having the code to determine a valid withdrawal or deposit to someone's bank balance living in the client, and the server just receives API calls of where to move the money, doing no additional verification of those numbers! Your bank would quickly go insolvent. As you'll hear many times throughout our docs, and your development career—never trust the client. + +But how does the client get access to the output of these Services? By default, that's through GraphQL. GraphQL is an API, accessible to clients, that relies on getting data from "somewhere" before returning it. That somewhere is a function backed by what's known as a [**resolver**](https://graphql.org/learn/execution/) in GraphQL. And in Redwood, those resolvers are your Services! + +``` +┌───────────┐ ┌───────────┐ ┌───────────┐ +│ Browser │ ───> │ GraphQL │ ───> │ Service │ +└───────────┘ └───────────┘ └───────────┘ +``` + +Remember: Service are just functions. That means they can be used not only as GraphQL resolvers, but from other Services, or serverless functions, or anywhere else you invoke a function on the api side. + +> **Can I use Service functions on the web side?** +> +> The short answer is no because our build process doesn't support it yet. +> +> Generally, in a full-stack application, Services will concern themselves with getting data in and out of a database. The libraries we use for this, like Prisma, do not run in the browser. However, even if it did, it would happily pass on whatever SQL-equivalent commands you give it, like `db.user.deleteMany()`, which would remove all user records! That kind of power in the hands of the client would wreak havoc the likes of which you have never seen. + +Service functions can also call each other. For example, that theoretical Service function that handles transferring money between two accounts: it certainly comes in handy when a user initiates a transfer through a GraphQL call, but our business logic for what constitutes a transfer lives in that function. 
That function should be the only one responsible for moving money between two accounts, so we should make use of it anywhere we need to do a transfer—imagine an async task that moves $100 between a checking and savings account every 1st of the month. + +``` +┌───────────┐ ┌───────────┐ +│ Service │ ───> │ Service │ +└───────────┘ └───────────┘ +``` + +Finally, Services can also be called from [serverless functions](serverless-functions.md). Confusingly, these are also called "functions", but are meant to be run in a serverless environment where the code only exists long enough to complete a task and is then shut down. Redwood loves serverless functions. In fact, your GraphQL endpoint is, itself, a serverless function! In Redwood, these go in `api/src/functions`. Serverless functions can make use of Services, rather than duplicating business logic inside of themselves. In our bank transfer example, a third party service could initiate a webhook call to one of our serverless functions saying that Alice just got paid. Our (serverless) function can then call our (Service) function to make the transfer from the third party to Alice. + +``` +┌───────────────────────┐ ┌───────────┐ +│ Serverless Function │ ───> │ Service │ +└───────────────────────┘ └───────────┘ +``` + +## Service Validations + +Redwood includes a feature we call Service Validations. These simplify an extremely common task: making sure that incoming data is formatted properly before continuing. These validations are meant to be included at the start of your Service function and will throw an error if conditions are not met: + +```jsx +import { validate, validateWith, validateWithSync, validateUniqueness } from '@redwoodjs/api' + +export const createUser = async ({ input }) => { + validate(input.firstName, 'First name', { + presence: true, + exclusion: { in: ['Admin', 'Owner'], message: 'That name is reserved, sorry!' }, + length: { min: 2, max: 255 } + }) + validateWithSync(() => { + if (input.role === 'Manager' && !context.currentUser.roles.includes('admin')) { + throw 'Only Admins can create new Managers' + } + }) + await validateWith(async () => { + const inviteCount = await db.invites.count({ where: { userId: currentUser.id } }) + if (inviteCount >= 10) { + throw 'You have already invited your max of 10 users' + } + }) + + return validateUniqueness('user', { username: input.username }, (db) => { + return db.user.create({ data: input }) + }) +} +``` + +> **What's the difference between Service Validations and Validator Directives?** +> +> [Validator Directives](directives.md#validators) were added to Redwood in v0.37 and provide a way to validate whether data going through GraphQL is allowed based on the user that's currently requesting it (the user that is logged in). These directives control *access* to data, while Service Validators operate on a different level, outside of GraphQL, and make sure data is formatted properly before, most commonly, putting it into a database. +> +> You could use these in combination to, for example, prevent a client from accessing the email addresses of any users that aren't themselves (Validator Directives) while also verifying that when creating a user, an email address is present, formatted correctly, and unique (Service Validations). 
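+
+As a quick sketch of that second half, a `createUser` Service might guard the email field like this (the field and model names are just examples):
+
+```ts
+import { validate, validateUniqueness } from '@redwoodjs/api'
+
+export const createUser = async ({ input }) => {
+  // Make sure an email is present and looks like an email before touching the database
+  validate(input.email, 'Email', { presence: true, email: true })
+
+  // Only create the user if no existing record already uses this email;
+  // the callback receives a transaction-scoped Prisma client
+  return validateUniqueness('user', { email: input.email }, (db) =>
+    db.user.create({ data: input })
+  )
+}
+```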
+ +### Displaying to the User + +If you're using [Redwood's scaffolds](cli-commands.md#generate-scaffold) then you'll see requisite error messages when trying to save a form that runs into these validation errors automatically: + +![image](https://user-images.githubusercontent.com/300/138919184-89eddd9e-8ee7-4956-b7ed-ba8daaa0f6ea.png) + +Otherwise you'll need to use the `error` property that you can [destructure](https://www.apollographql.com/docs/react/data/mutations/#executing-a-mutation) from `useMutation()` and display an element containing the error message (Redwood's [form helpers](/docs/forms) will do some of the heavy lifting for you for displaying the error): + +```jsx {13,21} +import { Form, FormError, Label, TextField, Submit } from '@redwoodjs/forms' +import { useMutation } from '@redwoodjs/web' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: ContactInput!) { + createContact(input: $input) { + id + } + } +` + +const ContactPage = () => { + const [create, { loading, error }] = useMutation(CREATE_CONTACT) + + const onSubmit = (data) => { + create({ variables: { input: data }}) + } + + return ( + <Form onSubmit={onSubmit}> + <FormError error={error}> + <Label name="email">Email Address</Label> + <TextField name="email" /> + <Submit disabled={loading}>Save</Submit> + </Form> + ) +} +``` + +### Importing + +You'll import the three functions below from `@redwoodjs/api`: + +```jsx +import { validate, validateWith, validateUniqueness } from '@redwoodjs/api' +``` + +### validate() + +This is the main function to call when you have a piece of data to validate. There are two forms of this function call, one with 2 arguments and one with 3. The first argument is always the variable to validate and the last argument is an object with all the validations you want to run against the first argument. The (optional) second argument is the name of the field to be used in a default error message if you do not provide a custom one: + +```jsx +// Two argument form: validate(value, validations) +validate(input.email, { email: { message: 'Please provide a valid email address' } }) + +// Three argument form: validate(value, name, validations) +validate(input.email, 'Email Address', { email: true } +``` + +All validations provide a generic error message if you do not specify one yourself (great for quickly getting your app working). In the three argument version, you provide the "name" of the field (in this case `'Email Address'`) and that will be used in the error message: + +``` +Email Address must be formatted like an email address +``` + +Using the two argument version will use your custom error message in the validations object properties: + +``` +Please provide a valid email address +``` + +#### Multiple Validations + +You can provide multiple validations in the last argument object, some with custom messages and some without. 
If you include only *some* custom messages, make sure to use the 3-argument version as the ones without custom messages will need a variable name to include their messages: + +```jsx +validate(input.name, 'Name', { + presence: true, + exclusion: { + in: ['Admin', 'Owner'], + message: 'Sorry that name is reserved' + }, + length: { + min: 2, + max: 255, + message: 'Please provide a name at least two characters long, but no more than 255' + }, + format: { + pattern: /^[A-Za-z]+$/, + message: 'Name can only contain letters' + } +) +``` + +Note that the validations object properties often take two forms: a simple form without a custom message, and a nested object when you do need a custom message: + +```jsx +{ email: true } +{ email: { message: 'Must provide an email' } } + +{ exclusion: ['Admin', 'Owner'] } +{ exclusion: { in: ['Admin', 'Owner' ], message: 'That name is reserved' } } +``` + +This keeps the syntax as simple as possible when a custom message is not required. Details on the options for each validation are detailed below. + +#### Absence + +Requires that a field NOT be present, meaning it must be `null` or `undefined`. +Opposite of the [presence](#presence) validator. + +```jsx +validate(input.value, 'Value', { + absence: true +}) +``` + +##### Options + +* `allowEmptyString` will count an empty string as being absent (that is, `null`, `undefined` and `""` will pass this validation) + +```jsx +validate(input.honeypot, 'Honeypot', { + absence: { allowEmptyString: true } +}) +``` + +* `message`: a message to be shown if the validation fails + +```jsx +validate(input.value, { + absence: { message: 'Value must be absent' } +}) +``` + +#### Acceptance + +Requires that the passed value be `true`, or within an array of allowed values that will be considered "true". + +```jsx +validate(input.terms, 'Terms of Service', { + acceptance: true +}) +``` + +##### Options + +* `in`: an array of values that, if any match, will pass the validation + +```jsx +validate(input.terms, 'Terms of Service', { + acceptance: { in: [true, 'true', 1, '1'] } +}) +``` + +* `message`: a custom message if validation fails + +```jsx +validate(input.terms, { + acceptance: { message: 'Please accept the Terms of Service' } +}) +``` + +#### Email + +Requires that the value be formatted like an email address by comparing against a regular expression. The regex is extremely lax: `/^[^@\s]+@[^.\s]+\.[^\s]+$/` This says that the value: + +* Must start with one or more characters that aren't a whitespace or literal `@` +* Followed by a `@` +* Followed by one or more characters that aren't a whitespace or literal `.` +* Followed by a `.` +* Ending with one or more characters that aren't whitespace + +Since the [official email regex](http://www.ex-parrot.com/~pdw/Mail-RFC822-Address.html) is around 6,300 characters long, we though this one was good enough. If you have a different, preferred email validation regular expression, use the [format](#format) validation. + +```jsx +validate(input.email, 'Email', { + email: true +}) +``` + +##### Options + +* `message`: a custom message if validation fails + +```jsx +validate(input.email, { + email: { message: 'Please provide a valid email address' +}) +``` + +#### Exclusion + +Requires that the given value *not* equal to any in a list of given values. Opposite of the [inclusion](#inclusion) validation. 
+ +```jsx +validate(input.name, 'Name', { + exclusion: ['Admin', 'Owner'] +}) +``` + +##### Options + +* `in`: the list of values that cannot be used +* `caseSensitive`: toggles case sensitivity; default: `true` + +```jsx +validate(input.name, 'Name', { + exclusion: { in: ['Admin', 'Owner'] } +}) +``` + +* `message`: a custom error message if validation fails + +```jsx +validate(input.name, { + exclusion: { + in: ['Admin', 'Owner'], + message: 'That name is reserved, try another' + } +}) +``` + +#### Format + +Requires that the value match a given regular expression. + +```jsx +validate(input.usPhone, 'US Phone Number', { + format: /^[0-9-]{10,12}$/ +}) +``` + +##### Options + +* `pattern`: the regular expression to use + +```jsx +validate(input.usPhone, 'US Phone Number', { + format: { pattern: /^[0-9-]{10,12}$/ } +}) +``` + +* `message`: a custom error message if validation fails + + +```jsx +validate(input.usPhone, { + format: { + pattern: /^[0-9-]{10,12}$/, + message: 'Can only contain numbers and dashes' + } +}) +``` + +#### Inclusion + +Requires that the given value *is* equal to one in a list of given values. Opposite of the [exclusion](#exclusion) validation. + +```jsx +validate(input.role, 'Role', { + inclusion: ['Guest', 'Member', 'Manager'] +}) +``` + +##### Options + +* `in`: the list of values that can be used +* `caseSensitive`: toggles case sensitivity; default: `true` + +```jsx +validate(input.role, 'Role', { + inclusion: { in: ['Guest', 'Member', 'Manager'] } +}) +``` + +* `message`: a custom error message if validation fails + +```jsx +validate(input.role, 'Role', { + inclusion: { + in: ['Guest', 'Member', 'Manager'] , + message: 'Please select a proper role' + } +}) +``` + +#### Length + +Requires that the value meet one or more of a number of string length validations. + +```jsx +validate(input.answer, 'Answer', { + length: { min: 6, max: 200 } +}) +``` + +##### Options + +* `min`: must be at least this number of characters long + +```jsx +validate(input.name, 'Name', { + length: { min: 2 } +}) +``` + +* `max`: must be no more than this number of characters long + +```jsx +validate(input.company, 'Company', { + length: { max: 255 } +}) +``` + +* `equal`: must be exactly this number of characters long + +```jsx +validate(input.pin, 'PIN', { + length: { equal: 4 } +}) +``` + +* `between`: convenience syntax for defining min and max as an array + +```jsx +validate(input.title, 'Title', { + length: { between: [2, 255] } +}) +``` + +* `message`: a custom message if validation fails. Can use length options as string interpolations in the message itself, including `name` which is the name of the field provided in the second argument + +```jsx +validate(input.title, 'Title', { + length: { min: 2, max: 255, message: '${name} must be between ${min} and ${max} characters' } +}) +``` + +> Note that you cannot use backticks to define the string here—that would cause the value(s) to be interpolated immediately, and `min` and `max` are not actually available yet. This must be a plain string using single or double quotes, but using the `${}` interpolation syntax inside. + +#### Numericality + +The awesomely-named Numericality Validation requires that the value passed meet one or more criteria that are all number related. 
+ +```jsx +validate(input.year, 'Year', { + numericality: { greaterThan: 1900, lessThanOrEqual: 2021 } +}) +``` + +##### Options + +* `integer`: the number must be an integer + +```jsx +validate(input.age, 'Age', { + numericality: { integer: true } +}) +``` + +* `lessThan`: the number must be less than the given value + +```jsx +validate(input.temp, 'Temperature', { + numericality: { lessThan: 100 } +}) +``` + +* `lessThanOrEqual`: the number must be less than or equal to the given value + +```jsx +validate(input.temp, 'Temperature', { + numericality: { lessThanOrEqual: 100 } +}) +``` + +* `greaterThan`: the number must be greater than the given value + +```jsx +validate(input.temp, 'Temperature', { + numericality: { greaterThan: 32 } +}) +``` + +* `greaterThanOrEqual`: the number must be greater than or equal to the given number + +```jsx +validate(input.temp, 'Temperature', { + numericality: { greaterThanOrEqual: 32 } +}) +``` + +* `equal`: the number must be equal to the given number + +```jsx +validate(input.guess, 'Guess', { + numericality: { equal: 6 } +}) +``` + +* `otherThan`: the number must not be equal to the given number + +```jsx +validate(input.floor, 'Floor', { + numericality: { otherThan: 13 } +}) +``` + +* `even`: the number must be even + +```jsx +validate(input.skip, 'Skip', { + numericality: { even: true } +}) +``` + +* `odd`: the number must be odd + +```jsx +validate(input.zenGarden, 'Zen Garden', { + numericality: { odd: true } +}) +``` + +* `positive`: the number must be positive (greater than 0) + +```jsx +validate(input.balance, 'Balance', { + numericality: { positive: true } +}) +``` + +* `negative`: the number must be negative (less than 0) + +```jsx +validate(input.debt, 'Debt', { + numericality: { negative: true } +}) +``` + +* `message`: a custom message if validation fails. Some options can be used in string interpolation: `lessThan`, `lessThanOrEqual`, `greaterThan`, `greaterThanOrEqual`, `equal`, and `otherThan` + +```jsx +validate(input.floor, 'Floor', { + numericality: { otherThan: 13, message: 'You cannot go to floor ${otherThan}' } +}) +``` + +> Note that you cannot use backticks to define the string here—that would cause the value(s) to be interpolated immediately. This must be a plain string using single or double quotes, but using the `${}` interpolation syntax inside. + +#### Presence + +Requires that a field be present, meaning it must not be `null` or `undefined`. +Opposite of the [absence](#absence) validator. 
+ +```jsx +validate(input.value, 'Value', { + presence: true +}) +``` + +##### Options + +* `allowNull`: whether or not to allow `null` to be considered present (default is `false`) + +```jsx +validate(input.value, 'Value', { + presence: { allowNull: true } +}) +// `null` passes +// `undefined` fails +// "" passes +``` + +* `allowUndefined`: whether or not to allow `undefined` to be considered present (default is `false`) + +```jsx +validate(input.value, 'Value', { + presence: { allowUndefined: true } +}) +// `null` fails +// `undefined` passes +// "" passes +``` + +* `allowEmptyString`: whether or not to allow an empty string `""` to be considered present (default is `true`) + +```jsx +validate(input.value, 'Value', { + presence: { allowEmptyString: false } +}) +// `null` fails +// `undefined` fails +// "" fails +``` + +* `message`: a message to be shown if the validation fails + +```jsx +validate(input.lastName, { + presence: { allowEmptyString: false, message: "Can't leave last name empty" } +}) +``` + +#### Custom + +Run a custom validation function passed as `with` which should either throw or return nothing. +If the function throws an error, the error message will be used as the message of the validation error associated with the field. + +```jsx +validate(input.value, 'Value', { + custom: { + with: () => { + if (isInvalid) { + throw new Error('Value is invalid') + } + } + } +}) +``` + +##### Options + +* `message`: a custom error message if validation fails + +```jsx +validate(input.value, 'Value', { + custom: { + with: () => { + if (isInvalid) { + throw new Error('Value is invalid') + } + }, + message: 'Please specify a different value' + } +}) +``` +### validateWithSync() + +`validateWithSync()` is simply given a function to execute. This function should throw with a message if there is a problem, otherwise do nothing. + +```jsx +validateWithSync(() => { + if (input.name === 'Name') { + throw "You'll have to be more creative than that" + } +}) + +validateWithSync(() => { + if (input.name === 'Name') { + throw new Error("You'll have to be more creative than that") + } +}) +``` + +Either of these errors will be caught and re-thrown as a `ServiceValidationError` with your text as the `message` of the error (although technically you should always throw errors with `new Error()` like in the second example). + +You could just write your own function and throw whatever you like, without using `validateWithSync()`. But, when accessing your Service function through GraphQL, that error would be swallowed and the user would simply see "Something went wrong" for security reasons: error messages could reveal source code or other sensitive information so most are hidden. Errors thrown by Service Validations are considered "safe" and allowed to be shown to the client. + +### validateWith() + +The same behavior as `validateWithSync()` but works with Promises. Remember to `await` the validation. + +```jsx +await validateWith(async () => { + if (await db.products.count() >= 100) { + throw "There can only be a maximum of 100 products in your store" + } +}) +``` + +### validateUniqueness() + +This validation guarantees that the field(s) given in the first argument are unique in the database before executing the callback given in the last argument. If a record is found with the given fields then an error is thrown and the callback is not invoked. + +The uniqueness guarantee is handled through Prisma's [transaction API](https://www.prisma.io/docs/concepts/components/prisma-client/transactions). 
Given this example validation: + +```jsx +return validateUniqueness('user', { username: input.username }, (db) => { + return db.user.create({ data: input }) +}) +``` + +It is functionally equivalent to: + +```jsx +return await db.$transaction(async (db) => { + if (await db.user.findFirst({ where: { username: input.username } })) { + throw new ServiceValidationError('Username is not unique') + } else { + return db.user.create({ data: input }) + } +}) +``` + +So `validateUniqueness()` first tries to find a record with the given fields; if one is found it raises an error, and if not it executes the callback. + +> **Why use this when the database can verify uniqueness with a UNIQUE INDEX database constraint?** +> +> You may be in a situation where you can't have a unique index (supporting a legacy schema, perhaps), but still want to make sure the data is unique before proceeding. There is also the belief that you shouldn't have to count on the database to validate your data—that's a core concern of your business logic, and your business logic should live in your Services in a Redwood app. +> +> Another issue is that the error raised by Prisma when a record violates a unique index is swallowed by GraphQL and so you can't report it to the user (there are still ways around this, but it involves catching and re-throwing a different error). The error raised by `validateUniqueness()` is already safe-listed and allowed to be sent to the browser. + +#### Arguments + +1. The name of the db table accessor that will be checked (what you would call on `db` in a normal Prisma call). If you'd call `db.user` then this value is `"user"`. +2. An object containing the db fields/values to check for uniqueness, like `{ email: 'rob@redwoodjs.com' }`. It can also include additional options (explained below) that provide a narrower scope for the uniqueness requirement, and a way for the record to identify itself so it doesn't create a false positive against an existing record. +3. [Optional] An object with options: `message` (a custom error message) and `db` (a custom instance of the PrismaClient to use). +4. A callback to be invoked if the record is found to be unique. + +In its most basic usage, say you want to make sure that a user's email address is unique before creating the record. `input` is an object containing all the user fields to save to the database, including `email`, which must be unique: + +```jsx +const createUser = (input) => { + return validateUniqueness('user', { email: input.email }, (db) => { + return db.user.create({ data: input }) + }) +} +``` + +You can provide a custom message if the validation fails with the optional third argument: + +```jsx +const createUser = (input) => { + return validateUniqueness('user', + { email: input.email }, + { message: 'Your email is already in use' }, + (db) => db.user.create({ data: input }) + ) +} +``` + +You can also provide the PrismaClient to be used for the transaction and callback: + +```jsx +import { db } from 'src/lib/db' + +const createUser = (input) => { + return validateUniqueness('user', + { email: input.email }, + { db }, + (db) => db.user.create({ data: input }) + ) +} +``` + +> If you are overwriting the DATABASE_URL in your `src/lib/db` instantiation of the PrismaClient, you need to use this option.
If not provided, a vanilla `new PrismaClient()` is used to run the callback, and it will not respect any custom configuration that isn't represented in your `schema.prisma`. + +Be sure that both your callback and the surrounding `validateUniqueness()` function are `return`ed or else your service function will have nothing to return to its consumers, like GraphQL. + +##### $self + +What about updating an existing record? In its default usage, an update with this same `validateUniqueness` check will fail because the existing record will be found in the database, and so the check will think the email address is already in use, even though it's in use by the record itself! In this case, pass an extra `$self` prop in the list of fields that tells the check how to identify the record as itself: + +```jsx +const updateUser = (id, input) => { + return validateUniqueness('user', { + email: input.email, + $self: { id } + }, (db) => db.user.update({ where: { id }, data: input })) +} +``` + +Now the check for whether a record exists will exclude those records whose `id` is the same as this record's `id`. + +##### $scope + +Sometimes we may only want to check uniqueness against a subset of records, say only those owned by the same user. Two different users can each create a blog post with the same title, but a single user can't create two posts with the same title. If the `Post` table contains a foreign key to the user that created it, called `userId`, we can use that to **scope** the uniqueness check: + +```jsx +const createPost = (input) => { + return validateUniqueness('post', { + title: input.title, + $scope: { userId: context.currentUser.id } + }, (db) => { + return db.post.create({ data: input }) + }) +} +``` + +This makes sure that the user that's logged in and creating the post cannot reuse the same blog post title as one of their own posts. + +## Caching + +Redwood provides a simple [LRU cache](https://www.baeldung.com/java-lru-cache) for your services. With an LRU cache you never need to worry about manually expiring or updating cache items. You either read an existing item (if its **key** is found) or create a new cached item if it isn't. This means that over time the cache will get bigger and bigger until it hits a memory or disk usage limit, but you don't care: the cache software is responsible for removing the oldest/least used members to make more room. For many applications, the entire database result set may fit in the cache! + +How does a cache work? At its simplest, a cache is just a big chunk of memory or disk that stores key/value pairs. A unique key is used to look up a value—the value being what you wanted to cache. The trick with a cache is selecting a key that makes the data unique among all the other data being cached, but that also changes when something in the computed value changes, so the old entry can be safely discarded and a new value saved instead. More on that in [Choosing a Good Key](#choosing-a-good-key) below. + +Why use a cache? If you have an expensive or time-consuming process in your service that doesn't change on every request, this is a great candidate. For example, for a store front, you may want to show the most popular products. This may be computed by a combination of purchases, views, time spent on the product page, social media posts, or a whole host of additional information. Aggregating this data may take seconds or more, but the list of popular products probably doesn't change that often.
There's no reason to make every user wait all that time just to see the same list of products. With service caching, just wrap this computation in the `cache()` function and give it an expiration time of 24 hours; now the result is returned in milliseconds for every user (except the first one in a 24-hour period, for whom it still has to be computed from scratch and then stored in the cache again). You can even remove this first user's wait by "warming" the cache: triggering the service function from a process you run on the server on a 24-hour schedule, rather than by a user's first visit, so that it's the one that ends up waiting for the results to be computed. + +:::info What about GraphQL caching? + +You could also cache data at the [GraphQL layer](https://community.redwoodjs.com/t/guide-power-of-graphql-caching/2624), which has some of the same benefits. Using Envelop plugins you can add a response cache _after_ your services (resolver functions in the context of GraphQL) run, with a global configuration. + +However, by placing the cache one level "lower," at the service level, you get the benefit of caching even when one service calls another internally, or when a service is called via another serverless function, plus finer-grained control over what you're caching. + +In our example above you could cache the GraphQL query for the most popular products. But if you had an internal admin function using a different query that augments the popular products with additional information, you would need to cache that query as well. With service caching, that admin service function can call the same popular product function that's already cached and get the speed benefit automatically. + +::: + +### Clients + +As of this writing, Redwood ships with clients for the two most popular cache backends: [Memcached](https://memcached.org/) and [Redis](https://redis.io/). Service caching wraps each of these in an adapter, which makes it easy to add more clients in the future. If you're interested in adding an adapter for your favorite cache client, [open an issue](https://github.com/redwoodjs/redwood/issues) and tell us about it! Instructions for getting started with the code are [below](#creating-your-own-client). + +:::info + +If you need to access functionality in your cache client that the `cache()` and `cacheFindMany()` functions do not handle, you can always get access to the underlying raw client library and use it however you want: + +```javascript +import { cacheClient } from 'src/lib/cache' + +export const updatePost = async ({ id, input }) => { + const post = await db.post.update({ + data: input, + where: { id }, + }) + // highlight-next-line + await cacheClient.MSET(`post-${id}`, JSON.stringify(post), `blogpost-${id}`, JSON.stringify(post)) + + return post +} +``` + +::: + +### What Can Be Cached + +The service cache mechanism can only store strings, so whatever data you want to cache needs to be able to survive a round trip through `JSON.stringify()` and `JSON.parse()`. That means that if you have a real `Date` instance, you'd need to re-initialize it as a `Date`, because it's going to return from the cache as a string like `"2022-08-24T17:50:05.679Z"`. + +Functions will not survive being serialized as strings, so those are right out. + +Most Prisma datasets can be serialized just fine, as long as you're mindful of dates and things like BLOBs, which may contain binary data and could get mangled.
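+ +For example, here's a minimal sketch of rehydrating a date after reading from the cache (the `post` lookup and its fields are just for illustration): + +```js +// Cached values come back through JSON.parse(), so Date fields are plain +// ISO strings and need to be re-initialized manually. +const post = await cache(`post-${id}`, () => { + return db.post.findUnique({ where: { id } }) +}) + +// post.updatedAt is now a string like "2022-08-24T17:50:05.679Z" +const updatedAt = new Date(post.updatedAt) +```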
+ +We have an [outstanding issue](https://github.com/redwoodjs/redwood/issues/6282) which will add support for caching instances of custom classes and getting them back out of the cache as that instance, rather than the generic object you would normally get after a round trip through `JSON.stringify()` and `JSON.parse()`! + +### Expiration + +You can set a number of seconds after which to automatically expire the key. After this time the call to `cache()` will set the key/value in the store again. See the function descriptions below for usage examples. + +### Choosing a Good Key + +As the old saying goes, "there are only two hard problems in computer science: cache invalidation, and naming things." The reason cache invalidation is included in this list is, funnily enough, many times because of naming something—the key for the cache. + +Consider a product that you want to cache. At first thought you may think "I'll use the name of the product as its key" and so your key is `led light strip`. One problem is that you must make absolutely sure that your product name is unique across your shop. This may not be a viable solution for your store: you could have two manufacturers with the same product name. + +Okay, let's use the product's database ID as the key: `41443`. It's definitely going to be unique now, but what if you later add a cache for users? Could a user record in the database have that same ID? Probably, so now you may think you're retrieving a cached user, but you'll get the product instead. + +What if we add a "type" into the cache key, so we know what type of thing we're caching: `product-41443`. Now we're getting somewhere. Users will have a cache key `user-41443` and the two won't clash. But what happens if you change some data about that product, like the description? Remember that we can only get an existing key/value or create a new key/value in the cache; we can't update an existing key. How can we encapsulate the "knowledge" that a product's data has changed into the cache key? + +One solution would be to put all of the data that we care about changing into the key, like: `product-41443-${description}`. The problem here is that keys can only be so long (in Memcached it's 250 bytes). Another option could be to hash the entire product object and use that as the key (this can encompass the `product` part of the key as well as the ID itself, since *any* data in the object being different will result in a new hash): + +```js +import { md5 } from "blueimp-md5" + +cache(md5(JSON.stringify(product)), () => { + // ... +}) +``` + +This works, but it's not the nicest to look at in the code, and computing a hash isn't free (it's fast, but not 0 seconds). + +For this reason we always recommend that you add an `updatedAt` column to all of your models. This will automatically be set by Prisma to a timestamp whenever it updates that row in the database. This means we can count on this value being different whenever a record changes, regardless of what column actually changed. Now our key can look like `product-${id}-${updatedAt.getTime()}`. We use `getTime()` so that the timestamp is returned as a nice integer `1661464626032` rather than some string like `Thu Aug 25 2022 14:56:25 GMT-0700 (Pacific Daylight Time)`. + +:::info + +If you're using [Redwood Record](/docs/redwoodrecord), pretty soon you'll be able to cache a record by just passing the instance as the key, and it will automatically create the same key behind the scenes for you: + +```js +cache(product, () => { + // ...
+}) +``` +::: + +One drawback to this key is that it potentially responds to *too many* data changes, even ones we don't care about caching. Imagine that a product has a `views` field that tracks how many times it has been viewed in the browser. This number will be changing all the time, but if we don't display that count to the user then we're constantly re-creating the cache for the product even though no data the user will see is changing. There's no way to tell Prisma "set the `updatedAt` when the record changes, but not if the `views` column changes." This cache key is too variable. One solution would be to move the `views` column to another table with a `productId` pointing back to this record. Now the `product` is back to just containing data we care about caching. + +What if you want to expire a cache regardless of whether the data itself has changed? Maybe you make a UI change where you now show a product's SKU on the page where you didn't before. You weren't previously selecting the `sku` field out of the database, and so it hasn't been cached. But now that you're showing it you'll need to add it to the list of fields to return from the service. One solution would be to forcibly update all of the `updatedAt` fields in the database. But a) Prisma won't easily let you do this since it thinks it controls that column, and b) every product is going to appear to have been edited at the same time, when in fact nothing changed—you just needed to bust the cache. + +An easier solution to this problem would be to add some kind of version number to your cache key that you are in control of and can change whenever you like. Something like prepending a `v1` to the key: `v1-product-${id}-${updatedAt}`. + +And this key is our final form: a unique, but flexible key that allows us to expire the cache on demand (change the version) or automatically expire it when the record itself changes. + +:::info + +One more case: what if the underlying `Product` model itself changes, adding a new field, for example? Each product will now have new data, but no changes will occur to `updatedAt` as a result of adding this column. There are a couple of things you could do here: + +* Increment the version on the key, if you have one: `v1` => `v2` +* "Touch" all of the Product records in a script, forcing them to have their `updatedAt` timestamp changed +* Incorporate a hash of all the keys of a `product` into the cache key + +How does that last one work? We get a list of all the keys and then apply a hashing algorithm like MD5 to get a string that's unique based on that list of database columns. Then if one is ever added or removed, the hash will change, which will change the key, which will bust the cache: + +```javascript +const product = await db.product.findUnique({ where: { id } }) +const columns = Object.keys(product) // ['id', 'name', 'sku', ...] +const hash = md5(columns.join(',')) // "e4d7f1b4ed2e42d15898f4b27b019da4" + +cache(`v1-product-${hash}-${id}-${updatedAt}`, () => { + // ... +}) +``` + +Note that this has the side effect of having to select at least one record from the database so that you know what the column names are, but presumably this is much less overhead than whatever computation you're trying to avoid by caching: the slow work that happens inside of the function passed to `cache()` will still be avoided on subsequent calls (and selecting a single record from the database by an indexed column like `id` should be very fast).
+ +::: + +#### Expiration-based Keys + +You can skirt these issues about what data is changing and what to include or not include in the key by just setting an expiration time on this cache entry. You may decide that if a change is made to a product, it's okay if users don't see the change for, say, an hour. In this case just set the expiration time to 3600 seconds and it will automatically be re-built, whether something changed in the record or not: + +```js +cache(`product-${id}`, () => { + // ... +}, { expires: 3600 }) +``` + +This leads to your product cache being rebuilt every hour, even though you haven't made any changes that are of consequence to the user. But that may well be worth the tradeoff versus rebuilding the cache when *no* useful data has changed (like the `views` column being updated). + +#### Global Cache Key Prefix + +Just like the `v1` we added to the `product` cache key above, you can globally prefix a string to *all* of your cache keys: + +```js title="api/src/lib/cache.js" +export const { cache, cacheFindMany } = createCache(client, { + logger, + timeout: 500, + // highlight-next-line + prefix: 'alpha', +}) +``` + +This would turn a key like `posts-123` into `alpha-posts-123` before giving it to the cache client. If you prefixed with `v1` in the individual cache key, you'd now have `alpha-v1-posts-123`. + +This gives you a nuclear option to invalidate all cache keys globally in your app. Let's say you launched a redesign, or made some other visual change to your site where you may be showing more or less data from your GraphQL queries. If your cache keys were based purely on DB data (like `id` and `updatedAt`) there would be no way to refresh all of them without changing each and every cache key manually in every service, or manually updating *all* `updatedAt` timestamps in the database. The global prefix gives you a fallback for refreshing all cached data at once. + +#### Caching User-specific Data + +Sometimes you want to cache data unique to a user. Imagine a Recommended Products feature on our store: it should recommend products based on the user's previous purchase history, views, etc. In this case we'd want to include something unique about the user itself in the key: + +```js +cache(`recommended-${context.currentUser.id}`, () => { + // ... +}) +``` + +If every page the user visits has a different list of recommended products (meaning that the full computation will need to run at least once before it's cached) then creating this cache may not be worth it: how often does the user revisit the same product page? Conversely, if you show the *same* recommended products on every page then this cache would definitely improve the user's experience. + +The *key* to writing a good key (!) is to think carefully about the circumstances in which the key needs to expire, and include those bits of information in the key string/array. Adding caching can lead to weird bugs you don't expect, but in these cases the root cause will usually be the cache key not containing enough bits of information to expire it correctly. When in doubt, restart the app with the cache server (memcached or redis) disabled and see if the same behavior is still present. If not, the cache key is the culprit!
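+ +Putting those pieces together, here's a hedged sketch of a user-specific key that also carries a version and a freshness component (the `order` model and `latestOrder` lookup are hypothetical, just to illustrate the idea): + +```js +// Hypothetical sketch: version prefix + user id + a timestamp that changes +// whenever the data feeding the recommendations changes (assumes the user +// has at least one order). +const latestOrder = await db.order.findFirst({ + where: { userId: context.currentUser.id }, + orderBy: { updatedAt: 'desc' }, + select: { updatedAt: true }, +}) + +cache(`v1-recommended-${context.currentUser.id}-${latestOrder.updatedAt.getTime()}`, () => { + // ...the expensive recommendation computation +}) +``` + +If none of those bits change, the cached recommendations are reused; if any of them do, the key changes and the list is recomputed.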
+ +### Setup + +We have a setup command which creates a file `api/src/lib/cache.js` and include basic initialization for Memcached or Redis: + +```bash +yarn rw setup cache memcached +yarn rw setup cache redis +``` + +This generates the following (memcached example shown): + +```js title="api/src/lib/cache.js" +import { createCache, MemcachedClient } from '@redwoodjs/api/cache' + +import { logger } from './logger' + +const memJsFormattedLogger = { + log: (msg) => logger.error(msg), +} + +let client +try { + client = new MemcachedClient('localhost:11211', { + logger: memJsFormattedLogger, + }) +} catch (e) { + console.error(`Could not connect to cache: ${e.message}`) +} + +export const { cache, cacheFindMany } = createCache(client, { + logger, + timeout: 500, +}) +``` + +When the time comes, you can replace the hardcoded `localhost:11211` with an ENV var that can be set per-environment. + +#### Logging + +You'll see two different instances of passing `logger` as arguments here. The first: + +```js +client = new MemcachedClient(process.env.CACHE_SERVER, { + logger: memJsFormattedLogger, +}) +``` + +passes it to the `MemcachedClient` initializer, which passes it on to the MemJS library underneath so that it (MemJS) can report errors. `memJsFormattedLogger` just wraps the Redwood logger call in another function, which is the format expected by the MemJS library. + +The second usage of the logger argument: + +```js +export const { cache, cacheFindMany } = createCache(client, { + logger, + timeout: 500 +}) +``` + +is passing it to Redwood's own service cache code, so that it can log cache hits, misses, or errors. + +#### Options + +There are several options you can pass to the `createCache()` call: + +* `logger`: an instance of the Redwood logger. Defaults to `null`, but if you want any feedback about what the cache is doing, make sure to set this! +* `timeout`: how long to wait for the cache server to respond during a get/set before giving up and just executing the function containing what you want to cache and returning the result directly. Defaults to `500` milliseconds. +* `prefix`: a global cache key prefix. Defaults to `null`. +* `fields`: an object that maps the model field names for the `id` and `updatedAt` fields if your database has another name for them. For example: `fields: { id: 'post_id', updatedAt: 'updated_at' }`. Even if only one of your names is different, you need to provide both properties to this option. Defaults to `{ id: 'id', updatedAt: 'updatedAt' }` + +### `cache()` + +Use this function when you want to cache some data, optionally including a number of seconds before it expires: + +```js +// cache forever +const post = ({ id }) => { + return cache(`posts`, () => { + return db.post.findMany() + }) +} + +// cache for 1 hour +const post = ({ id }) => { + return cache(`posts`, () => { + return db.post.findMany() + }, { expires: 3600 }) +} +``` + +Note that a key can be a string or an array: + +```js +const post = ({ id }) => { + return cache(`posts-${id}-${updatedAt.getTime()}`, () => { + return db.post.findMany() + }) +} + +// or + +const post = ({ id }) => { + return cache(['posts', id, updatedAt.getTime()], () => { + return db.post.findMany() + }) +} +``` + +:::info + +`cache()` returns a Promise so you'll want to `await` it if you need the data for further processing in your service. If you're only using your service as a GraphQL resolver then you can just return `cache()` directly. 
+ +::: + +### `cacheFindMany()` + +Use this function if you want to cache the results of a `findMany()` call from Prisma, but only until one or more of the records in the set is updated. This is sort of a best-of-both-worlds cache scenario where you can cache as much data as possible, but also expire and re-cache as soon as any piece of it changes, without going through every record manually to see if it's changed: whenever *any* record changes the cache will be discarded. + +This function will always execute a `findFirst()` query to get the latest record that's changed, then use its `id` and `updatedAt` timestamp as the cache key for the full query. This means you'll always incur the overhead of a single DB call, but not the bigger `findMany()` unless something has changed. Note that you still need to include a cache key prefix: + +```js +const post = ({ id }) => { + return cacheFindMany(`users`, db.user) +} +``` + +The above is the simplest usage example. If you need to pass a `where`, or any other object that `findMany()` accepts, include a `conditions` key in an object as the third argument: + +```js +const post = ({ id }) => { + return cacheFindMany(`users`, db.user, { + conditions: { where: { roles: 'admin' } } + }) +} +``` + +This is functionally equivalent to the following: + +```js +const latest = await db.user.findFirst({ + where: { roles: 'admin' }, + orderBy: { updatedAt: 'desc' }, + select: { id: true, updatedAt: true } +}) + +return cache(`users-${latest.id}-${latest.updatedAt.getTime()}`, () => { + return db.user.findMany({ where: { roles: 'admin' } }) +}) +``` + +If you also want to pass an `expires` option, do it in the same object as `conditions`: + +```js +const post = ({ id }) => { + return cacheFindMany( + `users`, db.user, { + conditions: { where: { roles: 'admin' } }, + expires: 86400 + } + ) +} +``` + +:::info + +`cacheFindMany()` returns a Promise so you'll want to `await` it if you need the data for further processing in your service. If you're only using your service as a GraphQL resolver then you can just return the Promise. + +::: + +### `deleteCacheKey()` + +There may be instances where you want to explicitly remove something from the cache so that it gets re-created with the same cache key. A good example is caching a single user, using only their `id` as the cache key. By default, the cache would never bust because a user's `id` is not going to change, no matter how many other fields on the user are updated. With `deleteCacheKey()` you can choose to delete the key, for example, when the `updateUser()` service is called. The next time `user()` is called, it will be re-cached with the same key, but it will now contain whatever data was updated. + +```javascript +import { cache, deleteCacheKey } from 'src/lib/cache' + +const user = ({ id }) => { + return cache(`user-${id}`, () => { + return db.user.findUnique({ where: { id } }) + }) +} + +const updateUser = async ({ id, input }) => { + await deleteCacheKey(`user-${id}`) + return db.user.update({ where: { id }, data: input }) +} +``` + +:::warning + +When explicitly deleting cache keys like this you could find yourself going down a rabbit hole. What if there is another service somewhere that also updates a user? Or another service that updates an organization, as well as all of its underlying child users at the same time? You'll need to be sure to call `deleteCacheKey()` in these places as well.
As a general guideline, it's better to come up with a cache key that encapsulates any triggers for when the data has changed (like the `updatedAt` timestamp, which will change no matter who updates the user, anywhere in your codebase). + +Scenarios like this are what people are talking about when they say that caching is hard! + +::: + + +### Testing what you cache +We wouldn't just give you all of these caching APIs and not show you how to test it, right? You'll find all the details in the [Caching section in the testing doc](testing.md#testing-caching). + +### Creating Your Own Client + +If Memcached or Redis don't serve your needs, you can create your own client adapter. In the Redwood codebase take a look at `packages/api/src/cache/clients` as a reference for writing your own. The interface is extremely simple: + +* Extend from the `BaseClient` class. +* A constructor that takes whatever arguments you want, passing them through to the client's initialization code. +* A `get()` function that accepts a `key` argument and returns the data from the cache if found, otherwise `null`. Note that in the Memcached and Redis clients the value returned is first run through `JSON.parse()`, but if your cache client supports native JS objects then you wouldn't need to do this. +* A `set()` function that accepts a string `key`, the `value` to be cached, and an optional `options` object containing at least an `expires` key. Note that `value` can be a real JavaScript object at this point, but in Memcached and Redis the value is run through `JSON.stringify()` before being sent to the client library. You may or may not need to do the same thing, depending on what your cache client supports. diff --git a/docs/versioned_docs/version-7.0/storybook.md b/docs/versioned_docs/version-7.0/storybook.md new file mode 100644 index 000000000000..0bfc41bfd573 --- /dev/null +++ b/docs/versioned_docs/version-7.0/storybook.md @@ -0,0 +1,85 @@ +--- +description: A component-driven development workflow +--- + +# Storybook + +Storybook enables a kind of frontend-first, component-driven development workflow that we've always wanted. +By developing your UI components in isolation, you get to focus exclusively on your UI's needs, +saving you from getting too caught up in the details of your API too early. + +Storybook also makes debugging a lot easier. +You don't have to start the dev server, log in as a user, tab through dropdowns, and click buttons just for that one bug to show up. +Or render a whole page and make six GraphQL calls just to change the color of a modal. +You can set it all up as a story, tweak it there as you see fit, and even test it for good measure. + +## Getting Started with Storybook + +You can start Storybook with `yarn rw storybook`: + +``` +yarn rw storybook +``` + +If this is your first time running Storybook, the Redwood CLI will install it. +From then on, you can run it straightaway. +Once Storybook is installed, it'll spin up on port `7910`. + +## Configuring Storybook + +You only have to configure Storybook if you want to extend Redwood's default configuration, which handles things like how to find stories, configuring Webpack, starting Mock Service Worker, etc. + +There are two files you can add to your project's `web/config` directory to configure Storybook: `storybook.config.js` and `storybook.preview.js`.
Note that you may have to create the `web/config` directory: + +``` +cd redwood-project/web +mkdir config +touch config/storybook.config.js config/storybook.preview.js +``` + +`storybook.config.js` configures Storybook's server and `storybook.preview.js` configures the way stories render. +All of these files get merged with Redwood's default configurations, which you can find in the `@redwoodjs/testing` package: + +- [main.js](https://github.com/redwoodjs/redwood/blob/main/packages/testing/config/storybook/main.js)—gets merged with your project's `storybook.config.js` +- [preview.js](https://github.com/redwoodjs/redwood/blob/main/packages/testing/config/storybook/preview.js)—gets merged with your project's `storybook.preview.js` + +### Configuring the Server with `storybook.config.js` + +:::tip You may have to restart Storybook + +Since `storybook.config.js` configures Storybook's server, changes you make may require restarting Storybook. + +::: + +While you can configure [any of Storybook server's available options](https://storybook.js.org/docs/react/configure/overview#configure-your-storybook-project) in `storybook.config.js`, you'll probably only want to configure `addons`: + +```javascript title="web/config/storybook.config.js" +module.exports = { + /** + * This line adds all of Storybook's essential addons. + * + * @see {@link https://storybook.js.org/addons/tag/essentials} + */ + addons: ['@storybook/addon-essentials'], +} +``` + +### Configuring Rendering with `storybook.preview.js` + +Sometimes you want to change the way all your stories render. +It'd be mixing concerns to add that logic to your actual components, and it'd get old fast to add it to every single `.stories.{jsx,tsx}` file. +Instead, decorate all your stories with any custom rendering logic you want in `storybook.preview.js`. + +For example, something you may want to do is add some margin to all your stories so that they're not glued to the top left corner: + +```jsx title="web/config/storybook.preview.js" +export const decorators = [ + (Story) => ( + <div style={{ margin: '48px' }}> + <Story /> + </div> + ), +] +``` + +For more, see the Storybook docs on [configuring how stories render](https://storybook.js.org/docs/react/configure/overview#configure-story-rendering). diff --git a/docs/versioned_docs/version-7.0/studio.md b/docs/versioned_docs/version-7.0/studio.md new file mode 100644 index 000000000000..09694100cc93 --- /dev/null +++ b/docs/versioned_docs/version-7.0/studio.md @@ -0,0 +1,125 @@ +--- +description: RedwoodJS Studio is a package used during development to gain runtime insights into a project. +--- + +# Studio + +RedwoodJS Studio is a package used during development to gain runtime insights into a project. + +## Motivation + +Redwood provides tools that let developers "get to work on what makes your application special, instead of wasting cycles choosing and re-choosing various technologies and configurations."[1](https://github.com/redwoodjs/redwood/blob/main/README.md). + +Much happens while your app processes a request: Invoke a function; handle a GraphQL request; resolve the request with a service; build and execute a SQL statement; connect to the database; handle the query response; further resolve the response so it contains all the data needed; return the result ... and more. + +While [logging](https://redwoodjs.com/docs/logger) can show you some of these steps, there is no easy way to see how they relate to each other, compare, or break down individual timings.
The observability you need to debug, iterate on, experiment with, and refactor your code is lacking. + +We hope Studio helps solve this problem with an observability tool that combines: + +* Tracing with OpenTelemetry (service and GraphQL) + +* SQL statement logging + +* General metrics (how many invocations) + +* GraphiQL playground with impersonated authentication + +With Studio, it is easier to: + +* identify slow-running SQL statements without reviewing captured log files + +* identify and improve N+1 queries by comparing before-and-after traces + +* impersonate the user authentication headers in GraphiQL + +Redwood Studio is a command-line tool that offers a web UI aimed at providing insights into your application via OpenTelemetry ingestion and other development conveniences like auth impersonation within GraphiQL. + +### Demo +<div class="video-container"> + <iframe width="560" height="315" src="https://www.youtube.com/embed/zAViN-J-iFs?si=YywnOvMT1Fy3hKzd" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" allowfullscreen></iframe> +</div> + +### Setup +There is no setup needed to begin using Studio; simply execute the following command to start it at `localhost:4318`: +```bash +yarn rw studio +``` +The first time you run this command it will likely install the Studio package, which may take a little while. + +#### OpenTelemetry +If you want Studio to pick up telemetry from your app automatically, please ensure you've set up OpenTelemetry. A guide on this can be found [here](https://community.redwoodjs.com/t/opentelemetry-support-experimental/4772). + +### Features + +#### GraphiQL Auth Impersonation + +You need to configure Studio using settings inside `redwood.toml` for auth +impersonation to work. See the sections below for detailed information. + +##### DbAuth + +Requires a `SESSION_SECRET` environment variable for cookie encryption. + +`redwood.toml` example: + +```toml +[studio.graphiql.authImpersonation] + authProvider = "dbAuth" + email = "user@example.com" + userId = "1" +``` + +##### Netlify + +Since Netlify does not expose the JWT secret used to sign the token in +production, impersonation requires a `jwtSecret` to encode and decode the auth +token. + +`redwood.toml` example: + +```toml +[studio.graphiql.authImpersonation] + authProvider = "netlify" + email = "user@example.com" + userId = "1" + jwtSecret = "some-secret-setting" +``` + +##### Supabase + +Requires a `SUPABASE_JWT_SECRET` environment variable for JWT signing. + +`redwood.toml` example: + +```toml +[studio.graphiql.authImpersonation] + authProvider = "supabase" + email = "user@example.com" + userId = "1" +``` + +### Settings + +All settings for Studio are located in `redwood.toml`, which you can find at +the root of your Redwood project. + +* `[studio.graphiql.authImpersonation].*` – Used to gain access to GraphQL + endpoints that require authentication. See the section above on auth + impersonation for more details. +* `[studio].basePort` – Studio's web front-end will run on this port (default: + 4318). It is also used to calculate the port for the mailer integration and + other things. Please choose a port that is not already in use, and that has a + few more free ports available next to it. + +### Database File +Studio stores the ingested telemetry in `studio/prisma.db` within the +`.redwood` folder.
You should not need to touch this file other than if you +wish to delete it to erase any existing telemetry data. + +## Availability +Along the release of Redwood v7, Studio has been rewritten and is available as +a stable version. Just run `yarn rw studio` to start it! Prior to RW v7 Studio +was available as an experimental feature. If you're still not using the stable +version of RW v7, see this forum topic for information on the old experimental +version of Studio +https://community.redwoodjs.com/t/redwood-studio-experimental/4771 diff --git a/docs/versioned_docs/version-7.0/testing.md b/docs/versioned_docs/version-7.0/testing.md new file mode 100644 index 000000000000..46304dbaa622 --- /dev/null +++ b/docs/versioned_docs/version-7.0/testing.md @@ -0,0 +1,2095 @@ +--- +description: A comprehensive reference for testing your app +--- + +# Testing + +Testing. For some it's an essential part of their development workflow. For others it's something they know they *should* do, but for whatever reason it hasn't struck their fancy yet. For others still it's something they ignore completely, hoping the whole concept will go away. But tests are here to stay, and maybe Redwood can change some opinions about testing being awesome and fun. + +## Introduction to Testing + +If you're already familiar with the ins and outs of testing and just want to know how to do it in Redwood, feel free to [skip ahead](#redwood-and-testing). Or, keep reading for a refresher. In the following section, we'll build a simple test runner from scratch to help clarify the concepts of testing in our minds. + +## Building a Test Runner + +The idea of testing is pretty simple: for each "unit" of code you write, you write additional code that exercises that unit and makes sure it works as expected. What's a "unit" of code? That's for you to decide: it could be an entire class, a single function, or even a single line! In general, the smaller the unit, the better. Your tests will stay fast and focused on just one thing, which makes them easy to update when you refactor. The important thing is that you start *somewhere* and codify your code's functionality in a repeatable, verifiable way. + +Let's say we write a function that adds two numbers together: + +```jsx +const add = (a, b) => { + return a + b +} +``` + +You test this code by writing another piece of code (which usually lives in a separate file and can be run in isolation), just including the functionality from the real codebase that you need for the test to run. For our examples here we'll put the code and its test side-by-side so that everything can be run at once. Our first test will call the `add()` function and make sure that it does indeed add two numbers together: + +```jsx {5-9} +const add = (a, b) => { + return a + b +} + +if (add(1, 1) === 2) { + console.log('pass') +} else { + console.error('fail') +} +``` + +Pretty simple, right? The secret is that this simple check *is the basis of all testing*. Yes, that's it. So no matter how convoluted and theoretical the discussions on testing get, just remember that at the end of the day you're testing whether a condition is true or false. + +### Running a Test + +You can [run that code with Node](https://nodejs.dev/learn/run-nodejs-scripts-from-the-command-line) or just copy/paste it into the [web console of a browser](https://developers.google.com/web/tools/chrome-devtools/console/javascript). You can also run it in a dedicated web development environment like JSFiddle. 
Switch to the **Javascript** tab below to see the code: + +<iframe width="100%" height="300" src="//jsfiddle.net/cannikin/mgy4ja1q/2/embedded/result,js/dark/" allowfullscreen="allowfullscreen" allowpaymentrequest frameborder="0" class="border"></iframe> + +> Note that you'll see `document.write()` in the JSFiddle examples instead of `console.log`; this is just so that you can actually see something in the **Result** tab, which is HTML output. + +You should see "pass" written to the output. To verify that our test is working as expected, try changing the `+` in the `add()` function to a `-` (effectively turning it into a `subtract()` function) and run the test again. Now you should see "fail". + +### Terminology + +Let's get to some terminology: + +* The entire code block that checks the functionality of `add()` is what's considered a single **test** +* The specific check that `add(1, 1) === 2` is known as an **assertion** +* The `add()` function itself is the **subject** of the test, or the code that is **under test** +* The value you expect to get (in our example, that's the number `2`) is sometimes called the **expected value** +* The value you actually get (whatever the output of `add(1, 1)` is) is sometimes called the **actual** or **received value** +* The file that contains the test is a **test file** +* Multiple test files, all run together, is known as a **test suite** +* You'll generally run your test files and suites with another piece of software. In Redwood that's Jest, and it's known as a **test runner** +* The amount of code you have that is exercised by tests is referred to as **coverage** and is usually reported as a percentage. If every single line of code is touched as a result of running your test suite then you have 100% coverage! + +This is the basic idea behind all the tests you'll write: when you add code, you'll add another piece of code that uses the first and verifies that the result is what you expect. + +Tests can also help drive new development. For example, what happens to our `add()` function if you leave out one of the arguments? We can drive these changes by writing a test of what we *want* to happen, and then modify the code that's being tested (the subject) to make it satisfy the assertion(s). + +### Expecting Errors + +So, what does happen if we leave off an argument when calling `add()`? Well, what do we *want* to happen? We'll answer that question by writing a test for what we expect. For this example let's have it throw an error. We'll write the test first that expects the error: + +```jsx +try { + add(1) +} catch (e) { + if (e === 'add() requires two arguments') { + console.log('pass') + } else { + console.error('fail') + } +} +``` + +This is interesting because we actually *expect* an error to be thrown, but we don't want that error to stop the test suite in it's tracks—we want the error to be raised, we just want to make sure it's exactly what we expect it to be! So we'll surround the code that's going to error in a try/catch block and inspect the error message. If it's what we want, then the test actually passes. + +> Remember: we're testing for what we *want* to happen. Usually you think of errors as being "bad" but in this case we *want* the code to throw an error, so if it does, that's actually good! Raising an error passes the test, not raising the error (or raising the wrong error) is a failure. + +Run this test and what happens? 
(If you previously made a change to `add()` to see the test fail, change it back now): + +<iframe width="100%" height="300" src="//jsfiddle.net/cannikin/mgy4ja1q/6/embedded/result,js/dark/" allowfullscreen="allowfullscreen" allowpaymentrequest frameborder="0" class="border"></iframe> + +Where did *that* come from? Well, our subject `add()` didn't raise any errors (Javascript doesn't care about the number of arguments passed to a function) and so it tried to add `1` to `undefined`, and that's Not A Number. We didn't think about that! Testing is already helping us catch edge cases. + +To respond properly to this case we'll make one slight modification: add another "fail" log message if the code somehow gets past the call to `add(1)` *without* throwing an error: + +```jsx {3,8} +try { + add(1) + console.error('fail: no error thrown') +} catch (e) { + if (e === 'add() requires two arguments') { + console.log('pass') + } else { + console.error('fail: wrong error') + } +} +``` + +We also added a little more information to the "fail" messages so we know which one we encountered. Try running that code again and you should see "fail: no error thrown" in the console. + +<iframe width="100%" height="300" src="//jsfiddle.net/cannikin/mgy4ja1q/7/embedded/result,js/dark/" allowfullscreen="allowfullscreen" allowpaymentrequest frameborder="0" class="border"></iframe> + +Now we'll actually update `add()` to behave as we expect: by throwing an error if less than two arguments are passed. + +```jsx +const add = (...nums) => { + if (nums.length !== 2) { + throw 'add() requires two arguments' + } + return nums[0] + nums[1] +} +``` + +Javascript doesn't have a simple way to check how many arguments were passed to a function, so we've converted the incoming arguments to an array via [spread syntax](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Spread_syntax) and then we check the length of that instead. + +<iframe width="100%" height="300" src="//jsfiddle.net/cannikin/mgy4ja1q/10/embedded/result,js/dark/" allowfullscreen="allowfullscreen" allowpaymentrequest frameborder="0" class="border"></iframe> + +We've covered passing too few arguments, what if we pass too many? We'll leave writing that test as homework, but you should have everything you need, and you won't even need any changes to the `add()` function to make it work! + +### Our Test Runner Compared to Jest + +Our tests are a little verbose (10 lines of code to test that the right number of arguments were passed). Luckily, the test runner that Redwood uses, Jest, provides a simpler syntax for the same assertions. Here's the complete test file, but using Jest's provided helpers: + +```jsx +describe('add()', () => { + it('adds two numbers', () => { + expect(add(1, 1)).toEqual(2) + }) + + it('throws an error for too few arguments', () => { + expect(() => add(1)).toThrow('add requires 2 arguments') + }) +}) +``` + +Jest lets us be very clear about our subject in the first argument to the `describe()` function, letting us know what we're testing. Note that it's just a string and doesn't have to be exactly the same as the function/class you're testing (but usually is for clarity). + +Likewise, each test is given a descriptive name as the first argument to the `it()` functions ("it" being the subject under test). Functions like `expect()` and `toEqual()` make it clear what values we expect to receive when running the test suite. 
If the expectation fails, Jest will indicate that in the output letting us know the name of the test that failed and what went wrong (the expected and actual values didn't match, or an error was thrown that we didn't expect). + +Jest also has a nicer output than our cobbled-together test runner using `console.log`: + +![image](https://user-images.githubusercontent.com/300/105783200-c6974680-5f2a-11eb-98af-d1884ecf2f99.png) + +Are you convinced? Let's keep going and see what Redwood brings to the table. + +## Redwood and Testing + +Redwood relies on several packages to do the heavy lifting, but many are wrapped in Redwood's own functionality which makes them even better suited to their individual jobs: + +* [Jest](https://jestjs.io/) +* [React Testing Library](https://testing-library.com/docs/react-testing-library/intro/) +* [Mock Service Worker](https://mswjs.io/) or **msw** for short. + +Redwood Generators get your test suite bootstrapped. Redwood also includes [Storybook](https://storybook.js.org/), which isn't technically a test suite, but can help in other ways. + +Let's explore each one and how they're integrated with Redwood. + +### Jest + +[Jest](https://jestjs.io/) is Redwood's test runner. By default, starting Jest via `yarn rw test` will start a watch process that monitors your files for changes and re-runs the test(s) that are affected by that changed file (either the test itself, or the subject under test). + +### React Testing Library + +[React Testing Library](https://testing-library.com/docs/react-testing-library/intro/) is an extension of [DOM Testing Library](https://testing-library.com/docs/dom-testing-library/intro), adding functionality specifically for React. React Testing Library lets us render a single component in isolation and test that expected text is present or a certain HTML structure has been built. + +### Mock Service Worker + +Among other things, Mock Service Worker (msw) lets you simulate the response from API calls. Where this comes into play with Redwood is how the web-side constantly calls to the api-side using GraphQL: rather than make actual GraphQL calls, which would slow down the test suite and put a bunch of unrelated code under test, Redwood uses MSW to intercept GraphQL calls and return a canned response, which you include in your test. + +### Storybook + +Storybook itself doesn't appear to be related to testing at all—it's for building and styling components in isolation from your main application—but it can serve as a sanity check for an overlooked part of testing: the user interface. Your tests will only be as good as you write them, and testing things like the alignment of text on the page, the inclusion of images, or animation can be very difficult without investing huge amounts of time and effort. These tests are also very brittle since, depending on how they're written, they can break without any code changes at all! Imagine an integration with a CMS that allows a marketing person to make text/style changes. These changes will probably not be covered by your test suite, but could make your site unusable depending on how bad they are. + +Storybook can provide a quick way to inspect all visual aspects of your site without the tried-and-true method of having a QA person log in and exercise every possible function. Unfortunately, checking those UI elements is not something that Storybook can automate for you, and so can't be part of a continuous integration system. 
But it makes it *possible* to do so, even if it currently requires a human touch. + +### Redwood Generators + +Redwood's generators will include test files for basic functionality automatically with any Components, Pages, Cells, or Services you generate. These will test very basic functionality, but they're a solid foundation and will not automatically break as soon as you start building out custom features. + +## Test Commands + +You can use a single command to run your entire suite : + +```bash +yarn rw test +``` + +This will start Jest in "watch" mode which will continually run and monitor the file system for changes. If you change a test or the component that's being tested, Jest will re-run any associated test file. This is handy when you're spending the afternoon writing tests and always want to verify the code you're adding without swapping back and forth to a terminal and pressing `↑` `Enter` to run the last command again. + +To start the process without watching, add the `--no-watch` flag: + +```bash +yarn rw test --no-watch +``` + +This one is handy before committing some changes to be sure you didn't inadvertently break something you didn't expect, or before a deploy to production. + + +### Filtering what tests to run + +You can run only the web- or api-side test suites by including the side as another argument to the command: + +```bash +yarn rw test web +yarn rw test api +``` + +Let's say you have a test file called `CommentForm.test.js`. In order to only watch and run tests in this file you can run + +```bash +yarn rw test CommentForm +``` + +If you need to be more specific, you can combine side filters, with other filters + +```bash +yarn rw test api Comment +``` +which will only run test specs matching "Comment" in the API side + +## Testing Components + +Let's start with the things you're probably most familiar with if you've done any React work (with or without Redwood): components. The simplest test for a component would be matching against the exact HTML that's rendered by React (this doesn't actually work so don't bother trying): + +```jsx title="web/src/components/Article/Article.js" +const Article = ({ article }) => { + return <article>{ article.title }</article> +} + +// web/src/components/Article/Article.test.js + +import { render } from '@redwoodjs/testing/web' +import Article from 'src/components/Article' + +describe('Article', () => { + it('renders an article', () => { + expect(render(<Article article={ title: 'Foobar' } />)) + .toEqual('<article>Foobar</article>') + }) +}) +``` + +This test (if it worked) would prove that you are indeed rendering an article. But it's also extremely brittle: any change to the component, even adding a `className` attribute for styling, will cause the test to break. That's not ideal, especially when you're just starting out building your components and will constantly be making changes as you improve them. + +:::info Why do we keep saying this test won't work? +Because as far as we can tell there's no easy way to simply render to a string. `render` actually returns an object that has several functions for testing different parts of the output. Those are what we'll look into in the next section. + +Note that Redwood's `render` function is based on React Testing Library's. The only difference is that Redwood's wraps everything with mock providers for the various providers in Redwood, such as auth, the GraphQL client, the router, etc. 
+ +If you were to use React Testing Library's `render` function, you'd need to provide your own wrapper function. In this case you probably want to compose the mock providers from `@redwoodjs/testing/web`: + +```jsx +import { render, MockProviders } from '@redwoodjs/testing/web' + +// ... + +render(<Article article={ title: 'Foobar' } />, { + wrapper: ({ children }) => ( + <MockProviders> + <MyCustomProvider>{children}</MyCustomProvider> + </MockProviders> + ) +}) +``` +::: + +### Mocking useLocation + +To mock `useLocation` in your component tests, wrap the component with `LocationProvider`: + +```jsx +import { LocationProvider } from '@redwoodjs/router' + +render( + <LocationProvider location={{ pathname: '', search: '?cancelled=true' }}> + <Component /> + </LocationProvider> +) +``` + +### Mocking useParams + +To mock `useParams` in your component tests, wrap the component with `ParamsProvider`: + +```jsx +import { ParamsProvider } from '@redwoodjs/router'; + +render( + <ParamsProvider allParams={{ param1: 'val1', param2: 'val2' }}> + <Component /> + </ParamsProvider> +) +``` + +The `allParams` argument accepts an object that will provide parameters as you expect them from the query parameters of a URL string. In the above example, we are assuming the URL looks like `/?param1=val1¶m2=val2`. + +### Queries + +In most cases you will want to exclude the design elements and structure of your components from your test. Then you're free to redesign the component all you want without also having to make the same changes to your test suite. Let's look at some of the functions that React Testing Library provides (they call them "[queries](https://testing-library.com/docs/queries/about/)") that let you check for *parts* of the rendered component, rather than a full string match. + +#### getByText() + +In our **<Article>** component it seems like we really just want to test that the title of the product is rendered. *How* and *what it looks like* aren't really a concern for this test. Let's update the test to just check for the presence of the title itself: + +```jsx {3,7-9} title="web/src/components/Article/Article.test.js" +import { render, screen } from '@redwoodjs/testing/web' + +describe('Article', () => { + it('renders an article', () => { + render(<Article article={ title: 'Foobar' } />) + + expect(screen.getByText('Foobar')).toBeInTheDocument() + }) +}) +``` + +Note the additional `screen` import. This is a convenience helper from React Testing Library that automatically puts you in the `document.body` context before any of the following checks. + +We can use `getByText()` to find text content anywhere in the rendered DOM nodes. `toBeInTheDocument()` is a [matcher](https://jestjs.io/docs/en/expect) added to Jest by React Testing Library that returns true if the `getByText()` query finds the given text in the document. + +So, the above test in plain English says "if there is any DOM node containing the text 'Foobar' anywhere in the document, return true." + +#### queryByText() + +Why not use `getByText()` for everything? Because it will raise an error if the text is *not* found in the document. That means if you want to explicitly test that some text is *not* present, you can't—you'll always get an error. + +Consider an update to our **<Article>** component: + +```jsx title="web/src/components/Article/Article.js" +import { Link, routes } from '@redwoodjs/router' + +const Article = ({ article, summary }) => { + return ( + <article> + <h1>{article.title}</h1> + <div> + {summary ? 
article.body.substring(0, 100) + '...' : article.body} + {summary && <Link to={routes.article(article.id)}>Read more</Link>} + </div> + </article> + ) +} + +export default Article +``` + +If we're only displaying the summary of an article then we'll only show the first 100 characters with an ellipsis on the end ("...") and include a link to "Read more" to see the full article. A reasonable test for this component would be that when the `summary` prop is `true` then the "Read more" text should be present. If `summary` is `false` then it should *not* be present. That's where `queryByText()` comes in (relevant test lines are highlighted): + +```jsx {22} title="web/src/components/Article/Article.test.js" +import { render, screen } from '@redwoodjs/testing/web' +import Article from 'src/components/Article' + +describe('Article', () => { + const article = { id: 1, title: 'Foobar', body: 'Lorem ipsum...' } + + it('renders the title of an article', () => { + render(<Article article={article} />) + + expect(screen.getByText('Foobar')).toBeInTheDocument() + }) + + it('renders a summary version', () => { + render(<Article article={article} summary={true} />) + + expect(screen.getByText('Read more')).toBeInTheDocument() + }) + + it('renders a full version', () => { + render(<Article article={article} summary={false} />) + + expect(screen.queryByText('Read more')).not.toBeInTheDocument() + }) +}) +``` + +#### getByRole() / queryByRole() + +`getByRole()` allows you to look up elements by their "role", which is an ARIA element that assists in accessibility features. Many HTML elements have a [default role](https://www.w3.org/TR/html-aria/#docconformance) (including `<button>` and `<a>`) but you can also define one yourself with a `role` attribute on an element. + +Sometimes it may not be enough to say "this text must be on the page." You may want to test that an actual *link* is present on the page. Maybe you have a list of users' names and each name should be a link to a detail page. We could test that like so: + +```jsx +it('renders a link with a name', () => { + render(<List data={[{ name: 'Rob' }, { name: 'Tom' }]} />) + + expect(screen.getByRole('link', { name: 'Rob' })).toBeInTheDocument() + expect(screen.getByRole('link', { name: 'Tom' })).toBeInTheDocument() +}) +``` + +`getByRole()` expects the role (`<a>` elements have a default role of `link`) and then an object with options, one of which is `name` which refers to the text content inside the element. Check out [the docs for the `*ByRole` queries](https://testing-library.com/docs/queries/byrole). + +If we wanted to eliminate some duplication (and make it easy to expand or change the names in the future): + +```jsx +it('renders a link with a name', () => { + const data = [{ name: 'Rob' }, { name: 'Tom' }] + + render(<List data={data} />) + + data.forEach((datum) => { + expect(screen.getByRole('link', { name: datum.name })).toBeInTheDocument() + }) +}) +``` + +But what if we wanted to check the `href` of the link itself to be sure it's correct? 
In that case we can capture the `screen.getByRole()` return and run expectations on that as well (the `forEach()` loop has been removed here for simplicity):
+
+```jsx {1,8-10}
+import { routes } from '@redwoodjs/router'
+
+it('renders a link with a name', () => {
+  const data = { id: 1, name: 'Rob' }
+
+  render(<List data={[data]} />)
+
+  const element = screen.getByRole('link', { name: data.name })
+  expect(element).toBeInTheDocument()
+  expect(element).toHaveAttribute('href', routes.user({ id: data.id }))
+})
+```
+
+> **Why so many empty lines in the middle of the test?**
+>
+> You may have noticed a pattern of steps begin to emerge in your tests:
+>
+> 1. Set variables or otherwise prepare some code
+> 2. `render` or execute the function under test
+> 3. `expect`s to verify output
+>
+> Most tests will contain at least the last two, but sometimes all three of these parts, and in some communities it's become standard to include a newline between each "section". Remember the acronym SEA: setup, execute, assert.
+
+#### Jest Expect: Type Considerations
+
+Redwood uses [prisma](https://www.prisma.io/) as an ORM for connecting to different databases like PostgreSQL, MySQL, and many more. The database models are defined in the `schema.prisma` file. The Prisma schema supports [`model` field scalar types](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#model-field-scalar-types), which are used to define the data types of your models' properties.
+
+Because of this, there are a few gotchas that can come up while testing your API and UI components.
+
+#### Floats and Decimals
+Prisma recommends using `Decimal` instead of `Float` when precision matters: a `Float` can lose digits after the decimal point, whereas Prisma returns `Decimal` values as strings, which preserves every digit.
+
+e.g., using the `Float` type
+```jsx {4}
+Expected: 1498892.0256940164
+Received: 1498892.025694016
+
+expect(result.floatingNumber).toEqual(1498892.0256940164)
+```
+
+e.g., using the `Decimal` type
+```jsx {4}
+Expected: 7420440.088194787
+Received: "7420440.088194787"
+
+expect(result.floatingNumber).toEqual(7420440.088194787)
+```
+
+In the first example the `Float` value has lost precision by the time it comes back from the database, so the expectation fails. With `Decimal` every digit is preserved; just remember that the value comes back as a string, so compare against a string (or convert it) in your expectation.
+
+> For cases where using decimal is not optimal, see the [Jest Expect documentation](https://jestjs.io/docs/expect) for other options and methods.
+
+#### DateTime
+
+Prisma returns [DateTime](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#datetime) fields as ISO 8601-formatted strings, so you can convert the date you're comparing against to an ISO string in JavaScript:
+
+```jsx {1}
+// Output: '2021-10-15T19:40:33.000Z'
+const isoString = new Date("2021-10-15T19:40:33Z").toISOString()
+```
+
+#### Other Queries/Matchers
+
+There are several other node/text types you can query against with React Testing Library, including `title`, `role` and `alt` attributes, form labels, placeholder text, and more.
+
+If you still can't access the node or text you're looking for, there's a fallback attribute you can add to any DOM element that can always be found: `data-testid`, which you can access using `getByTestId`, `queryByTestId` and others (the tradeoff is that the attribute is included in your rendered HTML all the time, not just when running the test suite).
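+
+For example, here's a minimal sketch of `getByTestId()` in action, assuming a hypothetical `PriceTag` component that renders its price inside an element with `data-testid="price"`:
+
+```jsx
+import { render, screen } from '@redwoodjs/testing/web'
+
+import PriceTag from 'src/components/PriceTag'
+
+it('renders the formatted price', () => {
+  render(<PriceTag value={100} />)
+
+  // found via the data-testid attribute rather than text, role, label, etc.
+  expect(screen.getByTestId('price')).toHaveTextContent('$100')
+
+  // queryByTestId returns null (instead of throwing) when nothing matches
+  expect(screen.queryByTestId('discount')).not.toBeInTheDocument()
+})
+```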
+ +You can refer to the [Cheatsheet](https://testing-library.com/docs/react-testing-library/cheatsheet/) from React Testing Library with the various permutations of `getBy`, `queryBy` and siblings. + +The full list of available matchers like `toBeInTheDocument()` and `toHaveAttribute()` don't seem to have nice docs on the Testing Library site, but you can find them in the [README](https://github.com/testing-library/jest-dom) inside the main repo. + +In addition to testing for static things like text and attributes, you can also use fire events and check that the DOM responds as expected. + +You can read more about these in below documentations: + + +- [React Testing Library User Events](https://testing-library.com/docs/ecosystem-user-event) +- [React Testing Library Jest DOM](https://testing-library.com/docs/ecosystem-jest-dom) +- [Official Testing Library](https://testing-library.com/docs/). + +### Mocking GraphQL Calls + +If you're using GraphQL inside your components, you can mock them to return the exact response you want and then focus on the content of the component being correct based on that data. Returning to our **<Article>** component, let's make an update where only the `id` of the article is passed to the component as a prop and then the component itself is responsible for fetching the content from GraphQL: + +> Normally we recommend using a cell for exactly this functionality, but for the sake of completeness we're showing how to test when doing GraphQL queries the manual way! + +```jsx title="web/src/components/Article/Article.js" +import { useQuery } from '@redwoodjs/web' + +const GET_ARTICLE = gql` + query getArticle($id: Int!) { + article(id: $id) { + id + title + body + } + } +` + +const Article = ({ id }) => { + const { data } = useQuery(GET_ARTICLE, { variables: { id } }) + + if (data) { + return ( + <article> + <h1>{data.article.title}</h1> + <div>{data.article.body}</div> + </article> + ) + } else { + return 'Loading...' + } +} + +export default Article +``` + +#### mockGraphQLQuery() + +Redwood provides the test function `mockGraphQLQuery()` for providing the result of a given named GraphQL. In this case our query is named `getArticle` and we can mock that in our test as follows: + +```jsx {6-14,18} title="web/src/components/Article/Article.test.js" +import { render, screen } from '@redwoodjs/testing/web' +import Article from 'src/components/Article' + +describe('Article', () => { + it('renders the title of an article', async () => { + mockGraphQLQuery('getArticle', (variables) => { + return { + article: { + id: variables.id, + title: 'Foobar', + body: 'Lorem ipsum...', + } + } + }) + + render(<Article id={1} />) + + expect(await screen.findByText('Foobar')).toBeInTheDocument() + }) +}) +``` + +We're using a new query here, `findByText()`, which allows us to find things that may not be present in the first render of the component. In our case, when the component first renders, the data hasn't loaded yet, so it will render only "Loading..." which does *not* include the title of our article. Without it the test would immediately fail, but `findByText()` is smart and waits for subsequent renders or a maximum amount of time before giving up. + +Note that you need to make the test function `async` and put an `await` before the `findByText()` call. Read more about `findBy*()` queries and the higher level `waitFor()` utility [here](https://testing-library.com/docs/dom-testing-library/api-async). 
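+
+To make that timing difference concrete, here's a minimal sketch (reusing the `<Article>` component and mock above) that first asserts on the synchronous "Loading..." output and then waits for the mocked data to arrive:
+
+```jsx
+it('shows a loading message until the query resolves', async () => {
+  mockGraphQLQuery('getArticle', (variables) => {
+    return {
+      article: { id: variables.id, title: 'Foobar', body: 'Lorem ipsum...' },
+    }
+  })
+
+  render(<Article id={1} />)
+
+  // present on the very first render, so a synchronous getBy* query works
+  expect(screen.getByText('Loading...')).toBeInTheDocument()
+
+  // the title only shows up after the mocked response arrives
+  expect(await screen.findByText('Foobar')).toBeInTheDocument()
+})
+```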
+ +The function that's given as the second argument to `mockGraphQLQuery` will be sent a couple of arguments. The first—and only one we're using here—is `variables` which will contain the variables given to the query when `useQuery` was called. In this test we passed an `id` of `1` to the **<Article>** component when test rendering, so `variables` will contain `{id: 1}`. Using this variable in the callback function to `mockGraphQLQuery` allows us to reference those same variables in the body of our response. Here we're making sure that the returned article's `id` is the same as the one that was requested: + +```jsx {3} +return { + article: { + id: variables.id, + title: 'Foobar', + body: 'Lorem ipsum...', + } +} +``` + +Along with `variables` there is a second argument: an object which you can destructure a couple of properties from. One of them is `ctx` which is the context around the GraphQL response. One thing you can do with `ctx` is simulate your GraphQL call returning an error: + +```jsx +mockGraphQLQuery('getArticle', (variables, { ctx }) => { + ctx.errors([{ message: 'Error' }]) +}) +``` + +You could then test that you show a proper error message in your component: + +```jsx {2,6-8,18-20,24} title="web/src/components/Article/Article.js" +const Article = ({ id }) => { + const { data, error } = useQuery(GET_ARTICLE, { + variables: { id }, + }) + + if (error) { + return <div>Sorry, there was an error</div> + } + + if (data) { + // ... + } +} + +// web/src/components/Article/Article.test.js + +it('renders an error message', async () => { + mockGraphQLQuery('getArticle', (variables, { ctx }) => { + ctx.errors([{ message: 'Error' }]) + }) + + render(<Article id={1} />) + + expect(await screen.findByText('Sorry, there was an error')).toBeInTheDocument() +}) +``` + +#### mockGraphQLMutation() + +Similar to how we mocked GraphQL queries, we can mock mutations as well. Read more about GraphQL mocking in our [Mocking GraphQL requests](graphql/mocking-graphql-requests.md) docs. + +### Mocking Auth + +Most applications will eventually add [Authentication/Authorization](authentication.md) to the mix. How do we test that a component behaves a certain way when someone is logged in, or has a certain role? + +Consider the following component (that happens to be a page) which displays a "welcome" message if the user is logged in, and a button to log in if they aren't: + +```jsx title="web/src/pages/HomePage/HomePage.js" +import { useAuth } from '@redwoodjs/auth' + +const HomePage = () => { + const { isAuthenticated, currentUser, logIn } = useAuth() + + return ( + <> + <header> + { isAuthenticated && <h1>Welcome back {currentUser.name}</h1> } + </header> + <main> + { !isAuthenticated && <button onClick={logIn}>Login</button> } + </main> + </> + ) +} +``` + +If we didn't do anything special, there would be no user logged in and we could only ever test the not-logged-in state: + +```jsx title="web/src/pages/HomePage/HomePage.test.js" +import { render, screen } from '@redwoodjs/testing/web' +import HomePage from './HomePage' + +describe('HomePage', () => { + it('renders a login button', () => { + render(<HomePage />) + + expect(screen.getByRole('button', { name: 'Login' })).toBeInTheDocument() + }) +}) +``` + +This test is a little more explicit in that it expects an actual `<button>` element to exist and that it's label (name) be "Login". 
Being explicit with something as important as the login button can be a good idea, especially if you want to be sure that your site is friendly to screen-readers or another assistive browsing devices. + +#### mockCurrentUser() on the Web-side + +How do we test that when a user *is* logged in, it outputs a message welcoming them, and that the button is *not* present? Similar to `mockGraphQLQuery()` Redwood also provides a `mockCurrentUser()` which tells Redwood what to return when the `getCurrentUser()` function of `api/src/lib/auth.js` is invoked: + +```jsx title="web/src/pages/HomePage/HomePage.test.js" +import { render, screen, waitFor } from '@redwoodjs/testing/web' +import HomePage from './HomePage' + +describe('HomePage', () => { + it('renders a login button when logged out', () => { + render(<HomePage />) + + expect(screen.getByRole('button', { name: 'Login' })).toBeInTheDocument() + }) + + it('does not render a login button when logged in', async () => { + mockCurrentUser({ name: 'Rob' }) + + render(<HomePage />) + + await waitFor(() => { + expect( + screen.queryByRole('button', { name: 'Login' }) + ).not.toBeInTheDocument() + }) + }) + + it('renders a welcome message when logged in', async () => { + mockCurrentUser({ name: 'Rob' }) + + render(<HomePage />) + + expect(await screen.findByText('Welcome back Rob')).toBeInTheDocument() + }) +}) +``` + +Here we call `mockCurrentUser()` before the `render()` call. Right now our code only references the `name` of the current user, but you would want this object to include everything a real user contains, maybe an `email` and an array of `roles`. + +We introduced `waitFor()` which waits for a render update before passing/failing the expectation. Although `findByRole()` will wait for an update, it will raise an error if the element is not found (similar to `getByRole()`). So here we had to switch to `queryByRole()`, but that version isn't async, so we added `waitFor()` to get the async behavior back. + +The async behavior here is important. Even after setting the user with `mockCurrentUser()`, `currentUser` may be `null` during the initial render because it's being resolved. Waiting for a render update before passing/failing the exception gives the resolver a chance to execute and populate `currentUser`. + +> Figuring out which assertions need to be async and which ones don't can be frustrating, we know. If you get a failing test when using `screen` you'll see the output of the DOM dumped along with the failure message, which helps find what went wrong. You can see exactly what the test saw (or didn't see) in the DOM at the time of the failure. +> +> If you see some text rendering that you're sure shouldn't be there (because maybe you have a conditional around whether or not to display it) this is a good indication that the test isn't waiting for a render update that would cause that conditional to render the opposite output. Change to a `findBy*` query or wrap the `expect()` in a `waitFor()` and you should be good to go! + +You may have noticed above that we created two tests, one for checking the button and one for checking the "welcome" message. This is a best practice in testing: keep your tests as small as possible by only testing one "thing" in each. If you find that you're using the word "and" in the name of your test (like "does not render a login button *and* renders a welcome message") that's a sign that your test is doing too much. 
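+
+To make the tip above concrete, these two assertions are roughly equivalent ways of waiting for a render update (a sketch using the same `HomePage` example):
+
+```jsx
+// an async findBy* query waits for the element to appear...
+expect(await screen.findByText('Welcome back Rob')).toBeInTheDocument()
+
+// ...or you can wrap a synchronous getBy* query in waitFor()
+await waitFor(() =>
+  expect(screen.getByText('Welcome back Rob')).toBeInTheDocument()
+)
+```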
+ +#### Mocking Roles + +By including a list of `roles` in the object returned from `mockCurrentUser()` you are also mocking out calls to `hasRole()` in your components so that they respond correctly as to whether `currentUser` has an expected role or not. + +Given a component that does something like this: + +```jsx +const { currentUser, hasRole } = useAuth() + +return ( + { hasRole('admin') && <button onClick={deleteUser}>Delete User</button> } +) +``` + +You can test both cases (user does and does not have the "admin" role) with two separate mocks: + +```jsx +mockCurrentUser({ roles: ['admin'] }) +mockCurrentUser({ roles: [] }) +``` + +That's it! + +### Handling Duplication + +We had to duplicate the `mockCurrentUser()` call and duplication is usually another sign that things can be refactored. In Jest you can nest `describe` blocks and include setup that is shared by the members of that block: + +```jsx +describe('HomePage', () => { + describe('logged out', () => { + it('renders a login button when logged out', () => { + render(<HomePage />) + + expect(screen.getByRole('button', { name: 'Login' })).toBeInTheDocument() + }) + }) + + describe('log in', () => { + beforeEach(() => { + mockCurrentUser({ name: 'Rob' }) + + render(<HomePage />) + }) + + it('does not render a login button when logged in', async () => { + await waitFor(() => { + expect( + screen.queryByRole('button', { name: 'Login' }) + ).not.toBeInTheDocument() + }) + }) + + it('renders a welcome message when logged in', async () => { + expect(await screen.findByText('Welcome back Rob')).toBeInTheDocument() + }) + }) +}) +``` + +While the primordial developer inside of you probably breathed a sign of relief seeing this refactor, heed this warning: the more deeply nested your tests become, the harder it is to read through the file and figure out what's in scope and what's not by the time your actual test is invoked. In our test above, if you just focused on the last test, you would have no idea that `currentUser` is being mocked. Imagine a test file with dozens of tests and multiple levels of nested `describe`s—it becomes a chore to scroll through and mentally keep track of what variables are in scope as you look for nested `beforeEach()` blocks. + +Some schools of thought say you should keep your test files flat (that is, no nesting) which trades ease of readability for duplication: when flat, each test is completely self contained and you know you can rely on just the code inside that test to determine what's in scope. It makes future test modifications easier because each test only relies on the code inside of itself. You may get nervous thinking about changing 10 identical instances of `mockCurrentUser()` but that kind of thing is exactly what your IDE is good at! + +> For what it's worth, your humble author endorses the flat tests style. + +## Testing Custom Hooks + +Custom hooks are a great way to encapsulate non-presentational code. +To test custom hooks, we'll use the `renderHook` function from `@redwoodjs/testing/web`. + +:::info +Note that Redwood's `renderHook` function is based on React Testing Library's. The only difference is that Redwood's wraps everything with mock providers for the various providers in Redwood, such as auth, the GraphQL client, the router, etc. + +If you were to use React Testing Library's `renderHook` function, you'd need to provide your own wrapper function. 
In this case you probably want to compose the mock providers from `@redwoodjs/testing/web`: + +```jsx +import { renderHook, MockProviders } from '@redwoodjs/testing/web' + +// ... + +renderHook(() => myCustomHook(), { + wrapper: ({ children }) => ( + <MockProviders> + <MyCustomProvider>{children}</MyCustomProvider> + </MockProviders> + ) +}) +``` +::: + +To use `renderHook`: +1. Call your custom hook from an inline function passed to `renderHook`. For example: +```js +const { result } = renderHook(() => useAccumulator(0)) +``` +2. `renderHook` will return an object with the following properties: +- `result`: holds the return value of the hook in its `current` property (so `result.current`). Think of `result` as a `ref` for the most recently returned value +- `rerender`: a function to render the previously rendered hook with new props + +Let's go through an example. Given the following custom hook: + +```js title="web/src/hooks/useAccumulator/useAccumulator.js" +const useAccumulator = (initialValue) => { + const [total, setTotal] = useState(initialValue) + + const add = (value) => { + const newTotal = total + value + setTotal(newTotal) + return newTotal + } + + return { total, add } +} +``` + +The test could look as follows: + +```js title="web/src/hooks/useAccumulator/useAccumulator.test.js" +import { renderHook } from '@redwoodjs/testing/web' +import { useAccumulator } from './useAccumulator' + +describe('useAccumulator hook example in docs', () => { + it('has the correct initial state', () => { + const { result } = renderHook(() => useAccumulator(42)) + expect(result.current.total).toBe(42) + }) + + it('adds a value', () => { + const { result } = renderHook(() => useAccumulator(1)) + result.current.add(5) + expect(result.current.total).toBe(6) + }) + + it('adds multiple values', () => { + const { result } = renderHook(() => useAccumulator(0)) + result.current.add(5) + result.current.add(10) + expect(result.current.total).toBe(15) + }) + + it('re-initializes the accumulator if passed a new initializing value', () => { + const { result, rerender } = renderHook( + (initialValue) => useAccumulator(initialValue), + { + initialProps: 0, + } + ) + result.current.add(5) + rerender(99) + expect(result.current.total).toBe(99) + }) +}) +``` + +While `renderHook` lets you test a custom hook directly, there are cases where encapsulating the custom hook in a component is more robust. See https://kentcdodds.com/blog/how-to-test-custom-react-hooks. + +## Testing Pages & Layouts + +Pages and Layouts are just regular components so all the same techniques apply! + +## Testing Cells + +Testing Cells is very similar to testing components: something is rendered to the DOM and you generally want to make sure that certain expected elements are present. + +Two situations make testing Cells unique: + +1. A single Cell can export up to four separate components +2. There's a GraphQL query taking place + +The first situation is really no different from regular component testing: you just test more than one component in your test. For example: + +```jsx title="web/src/components/ArticleCell/ArticleCell.js" +import Article from 'src/components/Article' + +export const QUERY = gql` + query GetArticle($id: Int!) 
{ + article(id: $id) { + id + title + body + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => <div>Error: {error.message}</div> + +export const Success = ({ article }) => { + return <Article article={article} /> +} +``` + +Here we're exporting four components and if you created this Cell with the [Cell generator](cli-commands.md#generate-cell) then you'll already have four tests that make sure that each component renders without errors: + +```jsx title="web/src/components/ArticleCell/ArticleCell.test.js" +import { render, screen } from '@redwoodjs/testing/web' +import { Loading, Empty, Failure, Success } from './ArticleCell' +import { standard } from './ArticleCell.mock' + +describe('ArticleCell', () => { + it('renders Loading successfully', () => { + expect(() => { + render(<Loading />) + }).not.toThrow() + }) + + it('renders Empty successfully', async () => { + expect(() => { + render(<Empty />) + }).not.toThrow() + }) + + it('renders Failure successfully', async () => { + expect(() => { + render(<Failure error={new Error('Oh no')} />) + }).not.toThrow() + }) + + it('renders Success successfully', async () => { + expect(() => { + render(<Success article={standard().article} />) + }).not.toThrow() + }) +}) +``` + +You might think that "rendering without errors" is a pretty lame test, but it's actually a great start. In React something usually renders successfully or fails spectacularly, so here we're making sure that there are no obvious issues with each component. + +You can expand on these tests just as you would with a regular component test: by checking that certain text in each component is present. + +### Cell Mocks + +When the **<Success>** component is tested, what's this `standard()` function that's passed as the `article` prop? + +If you used the Cell generator, you'll get a `mocks.js` file along with the cell component and the test file: + +```jsx title="web/src/components/ArticleCell.mocks.js" +export const standard = () => ({ + article: { + id: 42, + } +}) +``` + +Each mock will start with a `standard()` function which has special significance (more on that later). The return of this function is the data you want to be returned from the GraphQL `QUERY` defined at the top of your cell. + +> Something to note is that the structure of the data returned by your `QUERY` and the structure of the object returned by the mock is in no way required to be identical as far as Redwood is concerned. You could be querying for an `article` but have the mock return an `animal` and the test will happily pass. Redwood just intercepts the GraphQL query and returns the mock data. This is something to keep in mind if you make major changes to your `QUERY`—be sure to make similar changes to your returned mock data or you could get falsely passing tests! + +Why not just include this data inline in the test? We're about to reveal the answer in the next section, but before we do just a little more info about working with these `mocks.js` file... + +Once you start testing more scenarios you can add custom mocks with different names for use in your tests. For example, maybe you have a case where an article has no body, only a title, and you want to be sure that your component still renders correctly. 
You could create an additional mock that simulates this condition: + +```jsx title="web/src/components/ArticleCell.mocks.js" +export const standard = () => ({ + article: { + id: 1, + title: 'Foobar', + body: 'Lorem ipsum...' + } +}) + +export const missingBody = { + article: { + id: 2, + title: 'Barbaz', + body: null + } +} +``` + +And then you just reference that new mock in your test: + +```jsx title="web/src/components/ArticleCell/ArticleCell.test.js" +import { render, screen } from '@redwoodjs/testing/web' +import { Loading, Empty, Failure, Success } from './ArticleCell' +import { standard, missingBody } from './ArticleCell.mock' + +describe('ArticleCell', () => { + /// other tests... + + it('Success renders successfully', async () => { + expect(() => { + render(<Success article={standard().article} />) + }).not.toThrow() + }) + + + it('Success renders successfully without a body', async () => { + expect(() => { + render(<Success article={missingBody.article} />) + }).not.toThrow() + }) +}) +``` + +Note that this second mock simply returns an object instead of a function. In the simplest case all you need your mock to return is an object. But there are cases where you may want to include logic in your mock, and in these cases you'll appreciate the function container. Especially in the following scenario... + +### Testing Components That Include Cells + +Consider the case where you have a page which renders a cell inside of it. You write a test for the page (using regular component testing techniques mentioned above). But if the page includes a cell, and a cell wants to run a GraphQL query, what happens when the page is rendered? + +This is where the specially named `standard()` mock comes into play: the GraphQL query in the cell will be intercepted and the response will be *the content of the `standard()` mock*. This means that no matter how deeply nested your component/cell structure becomes, you can count on every cell in that stack rendering in a predictable way. + +And this is where `standard()` being a function becomes important. The GraphQL call is intercepted behind the scenes with the same `mockGraphQLQuery()` function we learned about [earlier](#mocking-graphql). And since it's using that same function, the second argument (the function which runs to return the mocked data) receives the same arguments (`variables` and an object with keys like `ctx`). + +So, all of that is to say that when `standard()` is called it will receive the variables and context that goes along with every GraphQL query, and you can make use of that data in the `standard()` mock. That means it's possible to, for example, look at the `variables` that were passed in and conditionally return a different object. + +Perhaps you have a products page that renders either in stock or out of stock products. You could inspect the `status` that's passed into via `variables.status` and return a different inventory count depending on whether the calling code wants in-stock or out-of-stock items: + +```jsx title="web/src/components/ProductCell/ProductCell.mock.js" +export const standard = (variables) => { + return { + products: [ + { + id: variables.id, + name: 'T-shirt', + inventory: variables.status === 'instock' ? 100 : 0 + } + ] + } +}) +``` + +Assuming you had a **<ProductPage>** component: + +```jsx title="web/src/pages/ProductPage/ProductPage.js" +import ProductCell from 'src/components/ProductCell' + +const ProductPage = ({ status }) => { + return { + <div> + <h1>{ status === 'instock' ? 
'In Stock' : 'Out of Stock' }</h1> + <ProductsCell status={status} /> + </div> + } +} +``` + +Which, in your page test, would let you do something like: + +```jsx title="web/src/pages/ProductPage/ProductPage.test.js" +import { render, screen } from '@redwoodjs/testing/web' +import ArticleCell from 'src/components/ArticleCell' + +describe('ProductPage', () => { + it('renders in stock products', () => { + render(<ProductPage status='instock' />) + + expect(screen.getByText('In Stock')).toBeInTheDocument() + }) + + it('renders out of stock products', async () => { + render(<ProductPage status='outofstock' />) + + expect(screen.getByText('Out of Stock')).toBeInTheDocument() + }) +}) +``` + +Be aware that if you do this, and continue to use the `standard()` mock in your regular cell tests, you'll either need to start passing in `variables` yourself: + +```jsx {8} title="web/src/components/ArticleCell/ArticleCell.test.js" +describe('ArticleCell', () => { + /// other tests... + test('Success renders successfully', async () => { + expect(() => { + render(<Success article={standard({ status: 'instock' }).article} />) + }).not.toThrow() + }) +}) +``` + +Or conditionally check that `variables` exists at all before basing any logic on them: + +```jsx {4,15} title="web/src/components/ArticleCell/ArticleCell.mock.js" +export const standard = (variables) => { + return { + product: { + id: variables?.id || 1, + name: 'T-shirt', + inventory: variables && variables.status === 'instock' ? 100 : 0 + } + } +}) +``` + +## Testing Forms + +> An alternative explanation, written in TypeScript and featuring a Storybook example, [can be found on the RedwoodJS forum](https://community.redwoodjs.com/t/testing-forms-using-testing-library-user-event/2058). + +To test our forms, we can make use of of the [`@testing-library/user-event`](https://testing-library.com/docs/ecosystem-user-event/) library which helps us approximate the the events that would actually happen in the browser if a real user were interacting with our forms. For example, calling `userEvent.click(checkbox)` toggles a checkbox as if a user had clicked it. + +### Installing `@testing-library/user-event` + +`user-event` can be installed in the web side of your application by running: + +```bash +yarn workspace web add -D @testing-library/user-event +``` + +### Building a Form + +Let's assume you've already created a component using `yarn rw g component`. This component is built using the `@redwoodjs/forms` package and provides a simple interface for using the form: we subscribe to changes via an `onSubmit` callback-prop. + +```jsx title="NameForm.js" +import { Form, Submit, TextField } from '@redwoodjs/forms' + +const NameForm = ({ onSubmit }) => { + return ( + <Form onSubmit={onSubmit}> + <TextField + name="name" + placeholder="Name" + validation={{ + required: true, + }} + /> + <TextField + name="nickname" + placeholder="Nickname" + validation={{ + required: false, + }} + /> + <Submit>Submit</Submit> + </Form> + ) +} + +export default NameForm +``` + +### Testing the Form + +Now, we can extend the `test` file which Redwood generated. We're going to want to: + +1. Import `waitFor` from the `@redwoodjs/testing/web` library. +2. Add an import to `@testing-library/user-event` for its `default`. +3. Provide an `onSubmit` prop to our "renders successfully" test. 
+ +```jsx title="NameForm.test.js" +import { render, screen, waitFor } from '@redwoodjs/testing/web' +import userEvent from '@testing-library/user-event' + +import NameForm from './NameForm' + +describe('NameForm', () => { + it('renders successfully', () => { + expect(() => { + const onSubmit = jest.fn() + + render(<NameForm onSubmit={onSubmit} />) + }).not.toThrow() + }) +}) +``` + +Finally, we'll create three simple tests which ensure our form works as expected. + +1. Does our component NOT submit when required fields are empty? +2. Does our component submit when required fields are populated? +3. Does our component submit, passing our (submit) handler the data we entered? + +The important takeaways are: + +* We use `await` because our form's state will change multiple times; otherwise, our `expect`-ation would trigger prematurely. +* We use `waitFor` because `user-event`'s methods are synchronous, which contradicts the above. + * `waitFor` acts as our declaration of [`act`](https://reactjs.org/docs/test-utils.html#act), required when updating the state of a React component from a test. + +```jsx title="NameForm.test.js" +import { render, screen, waitFor } from '@redwoodjs/testing/web' +import userEvent from '@testing-library/user-event' + +import NameForm from './NameForm' + +describe('NameForm', () => { + + it('does not submit when required fields are empty', async () => { + const onSubmit = jest.fn() + + render(<NameForm onSubmit={onSubmit} />) + + const submitButton = screen.getByText('Submit') + + await waitFor(() => userEvent.click(submitButton)) + + expect(onSubmit).not.toHaveBeenCalled() + }) + + it('submits when required fields are entered', async () => { + const name = 'My Name' + const nickname = '' + + const onSubmit = jest.fn() + + render(<NameForm onSubmit={onSubmit} />) + + const nameField = screen.getByPlaceholderText('Name') + const submitButton = screen.getByText('Submit') + + await waitFor(() => userEvent.type(nameField, name)) + await waitFor(() => userEvent.click(submitButton)) + + expect(onSubmit).toHaveBeenCalledTimes(1) + expect(onSubmit).toHaveBeenCalled() + expect(onSubmit).toHaveBeenCalledWith( + { name, nickname }, + expect.objectContaining({ + _reactName: 'onSubmit', + type: 'submit', + }) + ) + }) + + it('submits with the expected, entered data', async () => { + const name = 'My Name' + const nickname = 'My Nickname' + + const onSubmit = jest.fn() + + render(<NameForm onSubmit={onSubmit} />) + + const nameField = screen.getByPlaceholderText('Name') + const nicknameField = screen.getByPlaceholderText('Nickname') + const submitButton = screen.getByText('Submit') + + await waitFor(() => userEvent.type(nameField, name)) + await waitFor(() => userEvent.type(nicknameField, nickname)) + await waitFor(() => userEvent.click(submitButton)) + + expect(onSubmit).toHaveBeenCalledTimes(1) + expect(onSubmit).toHaveBeenCalled() + expect(onSubmit).toHaveBeenCalledWith( + { name, nickname }, + expect.objectContaining({ + _reactName: 'onSubmit', + type: 'submit', + }) + ) + }) + +// }) +``` + +## Testing Services + +Until now we've only tested things on the web-side of our app. When we test the api-side that means testing our Services. + +In some ways testing a Service feels more "concrete" than testing components—Services deal with hard data coming out of a database or third party API, while components deal with messy things like language, layout, and even design elements. 
+
+Services will usually contain most of your business logic, which is important to verify for correctness—crediting or debiting the wrong account number on the Services side could put a swift end to your business!
+
+### The Test Database
+
+To simplify Service testing, rather than mess with your development database, Redwood creates a test database that it executes queries against. By default this database will be located at the location defined by a `TEST_DATABASE_URL` environment variable and will fall back to `.redwood/test.db` if that var does not exist.
+
+If you're using Postgres or MySQL locally you'll want to set that env var to the connection string of a dedicated test database in those services.
+
+:::info
+
+Does anyone else find it confusing that the software itself is called a "database", but the container that actually holds your data is also called a "database," and you can have multiple databases (containers) within one instance of a database (software)?
+
+:::
+
+When you start your test suite you may notice some output from Prisma talking about migrating the database. Redwood will automatically run `yarn rw prisma db push` against your test database to make sure it's up-to-date.
+
+:::warning What if I have custom migration SQL?
+
+The `prisma db push` command only restores a snapshot of the current database schema (so that it runs as fast as possible). **It does not actually run migrations in sequence.** This can cause a [problem](https://github.com/redwoodjs/redwood/issues/5818) if you have certain database configuration that *must* occur as a result of the SQL statements inside the migration files.
+
+In order to preserve those statements in your test database, you can set an additional ENV var which will use the command `yarn rw prisma migrate reset` instead. This will run each migration in sequence against your test database. The tradeoff is that starting your test suite will take a little longer depending on how many migrations you have:
+
+```.env title="/.env"
+TEST_DATABASE_STRATEGY=reset
+```
+
+Set the variable to `push`, or remove it completely, and it will use the default behavior of running `yarn rw prisma db push`.
+
+:::
+
+### Writing Service Tests
+
+A Service test can be just as simple as a component test:
+
+```jsx title="api/src/services/users/users.js"
+export const createUser = ({ input }) => {
+  return db.user.create({ data: input })
+}
+
+// api/src/services/users/users.test.js
+import { createUser } from './users'
+
+describe('users service', () => {
+  it('creates a user', async () => {
+    const record = await createUser({ input: { name: 'David' } })
+
+    expect(record.id).not.toBeNull()
+    expect(record.name).toEqual('David')
+  })
+})
+```
+
+This test creates a user and then verifies that it now has an `id` and that the name is what we sent in as the `input`. Note the use of `async`/`await`: although the service itself doesn't use `async`/`await`, when the service is invoked as a GraphQL resolver, the GraphQL provider sees that it's a Promise and waits for it to resolve before returning the response. We don't have that middleman here in the test suite so we need to `await` manually.
+
+Did a user really get created somewhere? Yes: in the test database!
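+
+As mentioned above, if you're running Postgres or MySQL locally you'd point `TEST_DATABASE_URL` at a dedicated test database. A hypothetical `.env` entry (with made-up credentials and database name) might look like this:
+
+```.env title="/.env"
+TEST_DATABASE_URL=postgresql://user:secret@localhost:5432/myapp_test
+```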
+
+> In theory, it would be possible to mock out the calls to `db` to avoid talking to the database completely, but we felt that the juice wouldn't be worth the squeeze—you will end up mocking tons of functionality that is also under active development (Prisma) and you'd constantly be chasing your tail trying to keep up. So we give devs a real database to access and remove a whole class of frustrating bugs and false test passes/failures because of out-of-date mocks.
+
+### Database Seeding
+
+What about testing code that retrieves a record from the database? Easy: just pre-seed the data into the database first, then test the retrieval. **Seeding** refers to setting some data in the database that some other code requires to be present to get its job done. In a production deployment this could be a list of pre-set tags that users can apply to forum posts. In our tests it refers to data that needs to be present for our *actual* test to use.
+
+In the following code, the "David" user is the seed. What we're actually testing is the `users()` and `user()` functions. We verify that the data returned by them matches the structure and content of the seed:
+
+```jsx
+it('retrieves all users', async () => {
+  const data = await createUser({ input: { name: 'David' } })
+
+  const list = await users()
+
+  expect(list.length).toEqual(1)
+})
+
+it('retrieves a single user', async () => {
+  const data = await createUser({ input: { name: 'David' } })
+
+  const record = await user({ id: data.id })
+
+  expect(record.id).toEqual(data.id)
+  expect(record.name).toEqual(data.name)
+})
+```
+
+Notice that the string "David" only appears once (in the seed) and the expectations are comparing against values in `data`, not the raw strings again. This is a best practice and makes it easy to update your test data in one place and have the expectations continue to pass without edits.
+
+Did your spidey sense tingle when you saw that exact same seed duplicated in each test? We probably have other tests that check that a user is editable and deletable, both of which would require the same seed again! Even more tingles! When there's obvious duplication like this you should know by now that Redwood is going to try and remove it.
+
+### Scenarios
+
+Redwood created the concept of "scenarios" to cover this common case. A scenario is a set of seed data that you can count on existing at the start of your test and that is removed again at the end. This means that each test lives in isolation, starts with the exact same database state as every other one, and any changes you make are only around for the length of that one test; they won't cause side effects in any other.
+
+When you use any of the generators that create a service (scaffold, sdl or service) you'll get a `scenarios.js` file alongside the service and test files:
+
+```jsx
+export const standard = defineScenario({
+  user: {
+    one: {
+      data: {
+        name: 'String',
+      },
+    },
+    two: {
+      data: {
+        name: 'String',
+      },
+    }
+  },
+})
+```
+
+This scenario creates two user records. The generator can't determine the intent of your fields, only their datatypes, so strings get prefilled with just 'String'. What's up with the `one` and `two` keys? Those are friendly names you can use to reference your scenario data in your test.
+
+The `data` key is one of Prisma's [create options](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#create). It's the same as in your Services—everything in the `one` and `two` keys actually just gets passed to Prisma's create.
You can even create [relationships](#relationships) if you want. + +Let's look at a better example. We'll update the scenario with some additional data and give them a more distinctive name: + +```jsx +export const standard = defineScenario({ + user: { + anthony: { + data : { + name: 'Anthony Campolo', + email: 'anthony@redwoodjs.com' + }, + }, + dom: { + data: { + name: 'Dom Saadi', + email: 'dom@redwoodjs.com' + }, + } + }, +}) +``` + +Note that even though we are creating two users we don't use array syntax and instead just pass several objects. Why will become clear in a moment. + +Now in our test we replace the `it()` function with `scenario()`: + +```jsx +scenario('retrieves all users', async (scenario) => { + const list = await users() + + expect(list.length).toEqual(Object.keys(scenario.user).length) +}) + +scenario('retrieves a single user', async (scenario) => { + const record = await user({ id: scenario.user.dom.id }) + + expect(record.id).toEqual(scenario.user.dom.id) +}) +``` + +The `scenario` argument passed to the function contains the scenario data *after being inserted into the database* which means it now contains the real `id` that the database assigned the record. Any other fields that contain a database default value will be populated, included DateTime fields like `createdAt`. We can reference individual model records by name, like `scenario.user.dom`. This is why scenario records aren't created with array syntax: otherwise we'd be referencing them with syntax like `scenario.user[1]`. Yuck. + +#### Named Scenarios + +You may have noticed that the scenario we used was once again named `standard`. This means it's the "default" scenario if you don't specify a different name. This implies that you can create more than one scenario and somehow use it in your tests. And you can: + +```jsx +export const standard = defineScenario({ + user: { + anthony: { + data : { + name: 'Anthony Campolo', + email: 'anthony@redwoodjs.com' + }, + }, + dom: { + data: { + name: 'Dom Saadi', + email: 'dom@redwoodjs.com' + }, + } + }, +}) + +export const incomplete = defineScenario({ + user: { + david: { + data: { + name: 'David Thyresson', + email: 'dt@redwoodjs.com' + }, + }, + forrest: { + data: { + name: '', + email: 'forrest@redwoodjs.com' + }, + } + } +}) +``` + +```jsx +scenario('incomplete', 'retrieves only incomplete users', async (scenario) => { + const list = await users({ complete: false }) + expect(list).toMatchObject([scenario.user.forrest]) +}) +``` + +The name of the scenario you want to use is passed as the *first* argument to `scenario()` and now those will be the only records present in the database at the time the test is run. Assume that the `users()` function contains some logic to determine whether a user record is "complete" or not. If you pass `{ complete: false }` then it should return only those that it determines are not complete, which in this case includes users who have not entered their name yet. We seed the database with the scenario where one user is complete and one is not, then check that the return of `users()` only contains the user without the name. + +#### Multiple Models + +You're not limited to only creating a single model type in your scenario, you can populate every table in the database if you want. 
+ +```jsx +export const standard = defineScenario({ + product: { + shirt: { + data: { + name: 'T-shirt', + inventory: 5 + }, + } + }, + order: { + first: { + data: { + poNumber: 'ABC12345' + }, + } + }, + paymentMethod: { + credit: { + data: { + type: 'Visa', + last4: 1234 + }, + } + } +}) +``` + +And you reference all of these on your `scenario` object as you would expect + +```jsx +scenario.product.shirt +scenario.order.first +scenario.paymentMethod.credit +``` + +#### Relationships + +What if your models have relationships to each other? For example, a blog **Comment** has a parent **Post**. Scenarios are passed off to Prisma's [create](https://www.prisma.io/docs/concepts/components/prisma-client/crud#create) function, which includes the ability to create nested relationship records simultaneously: + +```jsx +export const standard = defineScenario({ + comment: { + first: { + data: { + name: 'Tobbe', + body: 'But it uses some letters twice' + post: { + create: { + title: 'Every Letter', + body: 'The quick brown fox jumped over the lazy dog.' + } + } + }, + } + } +}) +``` + +Now you'll have both the comment and the post it's associated to in the database and available to your tests. For example, to test that you are able to create a second comment on this post: + +```jsx +scenario('creates a second comment', async (scenario) => { + const comment = await createComment({ + input: { + name: 'Billy Bob', + body: "A tree's bark is worse than its bite", + postId: scenario.comment.jane.postId, + }, + }) + + const list = await comments({ postId: scenario.comment.jane.postId }) + + expect(list.length).toEqual(Object.keys(scenario.comment).length + 1) +}) +``` + +`postId` is created by Prisma after creating the nested `post` model and associating it back to the `comment`. + +Why check against `Object.keys(scenario.comment).length + 1` and not just `2`? Because if we ever update the scenario to add more records (maybe to support another test) this test will keep working because it only assumes what *it itself* did: add one comment to existing count of comments in the scenario. + +You can also [include](https://www.prisma.io/docs/concepts/components/prisma-client/select-fields/) the post object (or `select` specific fields from it): + +``` javascript +export const standard = defineScenario({ + comment: { + first: { + data: { + name: 'Rob', + body: 'Something really interesting' + post: { + create: { + title: 'Brand new post', + body: 'Introducing dbAuth' + } + } + }, + include: { + post: true + } + } + } +}) +``` + +Then you’ll have both the `comment` and its `post`: + +```jsx +scenario('retrieves a comment with post', async (scenario) => { + const comment = await commentWithPost({ id: scenario.comment.first.id }) + + expect(comment.post.title).toEqual(scenario.comment.first.post.title) +}) +``` + +#### Relationships with Existing Records + +If your models have relationships and you need to connect new records to existing ones, using the object syntax just isn't going to cut it. + +Consider a `Comment`: it has a parent `Post`, and both of them have an `Author`. Using the object syntax, there's no way of accessing the `authorId` of the `Author` we just created. We could potentially hardcode it, but that's bad practice. + +```jsx +export const standard = defineScenario({ + post: { + first: { + data: { + name: 'First Post', + author: { create: { name: 'Kris' }}, + comments: { + create: [ + { + name: 'First Comment', + body: 'String', + authorId: // Here we want a different author... 
+ }, + { + name: 'First Comment Response', + body: 'String', + authorId: // But here we want the same author as the post... + }, + ], + }, + } + }), + }, +}) +``` + +When you run into this, you can access an existing `scenario` record using the distinctive name key as a function that returns an object: + +```jsx +export const standard = defineScenario({ + author: { + kris: { + data: { name: 'Kris' } + } + rob: { + data: { name: 'rob' } + } + }, + post: { + first: (scenario) => ({ + data: { + name: 'First Post', + authorId: scenario.author.kris.id, + comments: { + create: [ + { + name: 'First Comment', + body: 'String', + authorId: scenario.author.rob.id, + }, + { + name: 'First Comment Response', + body: 'String', + authorId: scenario.author.kris.id, + }, + ], + }, + } + }), + }, +}) +``` + +Since [ES2015](https://tc39.es/ecma262/#sec-ordinaryownpropertykeys), object property keys are in ascending order of creation. This means that a key in `defineScenario` has access to key(s) created before it. We can leverage this like so: + +```jsx +export const standard = defineScenario({ + user: { + kris: { + data: { name: 'Kris' } + } + }, + post: { + first: (scenario) => ({ + // Here you have access to the user above via `scenario.user` + }), + }, + comment: { + first: (scenario) => ({ + // Here you have access to both `scenario.user` and `scenario.post` + }) + } +}) +``` + +:::tip + +Looking for info on how TypeScript works with Scenarios? Check out the [Utility Types](typescript/utility-types.md#scenarios--testing) doc + +::: + +#### Which Scenarios Are Seeded? + +Only the scenarios named for your test are included at the time the test is run. This means that if you have: + +* `posts.test.js` +* `posts.scenarios.js` +* `comments.test.js` +* `comments.scenarios.js` + +Only the posts scenarios will be present in the database when running the `posts.test.js` and only comments scenarios will be present when running `comments.test.js`. And within those scenarios, only the `standard` scenario will be loaded for each test unless you specify a differently named scenario to use instead. + +During the run of any single test, there is only ever one scenario's worth of data present in the database: users.standard *or* users.incomplete. + +### describeScenario - a performance optimization + +The scenario feature described above should be the base starting point for setting up test that depend on the database. The scenario sets up the database before each scenario _test_, runs the test, and then tears down (deletes) the database scenario. This ensures that each of your tests are isolated, and that they do not affect each other. + +**However**, there are some situations where you as the developer may want additional control regarding when the database is setup and torn down - maybe to run your test suite faster. + +The `describeScenario` function is utilized to run a sequence of multiple tests, with a single database setup and tear-down. + +```js +// highlight-next-line +describeScenario('contacts', (getScenario) => { + // You can imagine the scenario setup happens here + + // All these tests now use the same setup 👇 + it('xxx', () => { + // Notice that the scenario has to be retrieved using the getter + // highlight-next-line + const scenario = getScenario() + //... + }) + + it('xxx', () => { + const scenario = getScenario() + /... + }) + +}) +``` + +> **CAUTION**: With describeScenario, your tests are no longer isolated. The results, or side-effects, of prior tests can affect later tests. 
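+
+For example, a record created in one test is still present when the next test in the block runs. Here's a minimal sketch of that pitfall, assuming hypothetical `createContact()` and `contacts()` service functions:
+
+```js
+describeScenario('contacts', (getScenario) => {
+  it('creates a contact', async () => {
+    // This record is NOT removed before the next test runs
+    await createContact({ input: { name: 'Rob', email: 'rob@example.com' } })
+  })
+
+  it('lists contacts', async () => {
+    const scenario = getScenario()
+    const list = await contacts()
+
+    // The scenario records *plus* the contact created by the previous test
+    expect(list.length).toEqual(Object.keys(scenario.contact).length + 1)
+  })
+})
+```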
+
+Reasons to use `describeScenario` include:
+
+- Creating multi-step tests where the next test depends on the results of the previous one (note the caution above).
+- Reducing test run time. There is overhead in setting up and tearing down the database for each test, and skipping that work can be a significant win when the likelihood of side effects is low, such as in query testing.
+
+### describeScenario Examples
+
+Here's an example of using `describeScenario` to speed up tests for a user query service, where the risk of side effects is low.
+
+```ts
+// highlight-next-line
+describeScenario<StandardScenario>('user query service', (getScenario) => {
+  let scenario: StandardScenario
+
+  beforeEach(() => {
+    // Grab the scenario before each test
+    // highlight-next-line
+    scenario = getScenario()
+  })
+
+  it('retrieves a single user for a validated user', async () => {
+    mockCurrentUser({ id: 123, name: 'Admin' })
+
+    const record = await user({ id: scenario.user.dom.id })
+
+    expect(record.id).toEqual(scenario.user.dom.id)
+  })
+
+  it('throws an error upon an invalid user id', async () => {
+    mockCurrentUser({ id: 123, name: 'Admin' })
+
+    const fcn = async () => await user({ id: null as unknown as number })
+
+    await expect(fcn).rejects.toThrow()
+  })
+
+  it('throws an error if not authenticated', async () => {
+    const fcn = async () => await user({ id: scenario.user.dom.id })
+
+    await expect(fcn).rejects.toThrow(AuthenticationError)
+  })
+
+  it('throws an error if the user is not authorized to query the user', async () => {
+    mockCurrentUser({ id: 999, name: 'BaseLevelUser' })
+
+    const fcn = async () => await user({ id: scenario.user.dom.id })
+
+    await expect(fcn).rejects.toThrow(ForbiddenError)
+  })
+})
+```
+
+:::tip Using named scenarios with describeScenario
+
+If you have multiple scenarios, you can use a named scenario with `describeScenario` too.
+
+For example:
+```js
+  // If we have a paymentDeclined scenario defined in the .scenarios.{js,ts} file
+  // The second parameter is the name of the "describe" block
+  describeScenario('paymentDeclined', 'Retrieving details', () => {
+    // ....
+  })
+```
+:::
+
+
+### mockCurrentUser() on the API-side
+
+Just like when testing the web-side, we can use `mockCurrentUser()` to mock out the user that's currently logged in (or not) on the api-side.
+
+Let's say that when someone comments on our blog, we attach the comment to their user record if they were logged in at the time.
Otherwise the comment would be anonymous: + +```jsx title="api/src/services/comments/comments.js" +export const createComment = ({ input }) => { + if (context.currentUser) { + return db.comment.create({ data: { userId: context.currentUser.id, ...input }}) + } else { + return db.comment.create({ data: input }) + } +} +``` + +We could include a couple of tests that verify this functionality like so: + +```jsx title="api/src/services/comments/comments.test.js" +scenario('attaches a comment to a logged in user', async (scenario) => { + mockCurrentUser({ id: 123, name: 'Rob' }) + + const comment = await createComment({ + input: { + body: "It is the nature of all greatness not to be exact.", + postId: scenario.comment.jane.postId, + }, + }) + + expect(comment.userId).toEqual(123) +}) + +scenario('creates anonymous comment if logged out', async (scenario) => { + // currentUser will return `null` by default in tests, but it's + // always nice to be explicit in tests that are testing specific + // behavior (logged in or not)—future devs may not go in with the + // same knowledge/assumptions as us! + mockCurrentUser(null) + + const comment = await createComment({ + input: { + body: "When we build, let us think that we build for ever.", + postId: scenario.comment.jane.postId, + }, + }) + + expect(comment.userId).toEqual(null) +}) +``` + +## Testing Functions + +Testing [serverless functions](serverless-functions.md) and [webhooks](webhooks.md) can be difficult and time-consuming because you have to construct the event and context information that the function handler needs. + +Webhook testing is even more complex because you might need to open a http tunnel to a running dev server to accept an incoming request, then you'll have to sign the webhook payload so that the request is trusted, and then you might even trigger events from your third-party service ... all manually. Every. Time. + +Luckily, RedwoodJS has several api testing utilities to make [testing functions and webhooks](serverless-functions.md#how-to-test-serverless-functions) a breeze -- and without having to run a dev server. + +> Want to learn to [How to Test Serverless Functions](serverless-functions.md#how-to-test-serverless-functions) and [Webhooks](serverless-functions.md#how-to-test-webhooks)? +> +> We have an entire testing section in the [Serverless Functions documentation](serverless-functions.md) that will walk your through an example of a function and a webhook. + +## Testing GraphQL Directives + +Please refer to the [Directives documentation](./directives.md) for details on how to write Redwood [Validator](./directives.md#writing-validator-tests) or [Transformer](./directives.md#writing-transformer-tests) Directives tests. + + +## Testing Caching +If you're using Redwood's [caching](services#caching), we provide a handful of utilities and patterns to help you test this too! + +Let's say you have a service where you cache the result of products, and individual products: + +```ts +export const listProducts: QueryResolvers['listProducts'] = () => { + // highlight-next-line + return cacheFindMany('products-list', db.product, { + expires: 3600, + }) +} + +export const product: QueryResolvers['product'] = async ({ id }) => { + // highlight-next-line + return cache( + `cached-product-${id}`, + () => + db.product.findUnique({ + where: { id }, + }), + { expires: 3600 } + ) +} +``` + +With this code, we'll be caching an array of products (from the find many), and individual products that get queried too. 
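+
+The tests in the rest of this section assume a product scenario with a few records. A minimal sketch of what that `.scenarios.{js,ts}` file might look like (the key names and field values are only illustrative, chosen to line up with the examples below):
+
+```js
+export const standard = defineScenario({
+  product: {
+    one: { data: { name: 'LS50', brand: 'KEF' } },
+    two: { data: { name: 'R3', brand: 'KEF' } },
+    three: { data: { name: 'LSX II', brand: 'KEF' } },
+  },
+})
+```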
+ + +:::tip +It's important to note that when you write scenario or unit tests, it will use the `InMemoryClient`. + +The InMemoryClient has a few extra features to help with testing. + +1. Allows you to call `cacheClient.clear()` so each of your tests have a fresh cache state +2. Allows you to get all its contents (without cache-keys) with the `cacheClient.contents` getter +::: + + +There's a few different things you may want to test, but let's start with the basics. + +In your test let's import your cache client and clear after each test: + + +```ts +import type { InMemoryClient } from '@redwoodjs/api/cache' +import { client } from 'src/lib/cache' + +// For TypeScript users +const testCacheClient = client as InMemoryClient + +describe('products', () => { + // highlight-start + afterEach(() => { + testCacheClient.clear() + }) + // highlight-end + //.... +}) +``` + +### The `toHaveCached` matcher +We have a custom Jest matcher included in Redwood to make things a little easier. To use it simply add an import to the top of your test file: + +```ts +// highlight-next-line +import '@redwoodjs/testing/cache' +// ^^ make `.toHaveCached` available +``` + +The `toHaveCached` matcher can take three forms: + +`expect(testCacheClient)` +1. `.toHaveCached(expectedData)` - check for an exact match of the data, regardless of the key +2. `.toHaveCached('expected-key', expectedData)` - check that the data is cached in the key you supply +3. `.toHaveCached(/key-regex.*/, expectedData)` - check that data is cached in a key that matches the regex supplied + + +Let's see these in action now: + +```ts +scenario('returns a single product', async (scenario: StandardScenario) => { + await product({ id: scenario.product.three.id }) + +// Pattern 1: Only check that the data is present in the cache + expect(testCacheClient).toHaveCached(scenario.product.three) + +// Pattern 2: Check that data is cached, at a specific key + expect(testCacheClient).toHaveCached( + `cached-product-${scenario.product.three.id}`, + scenario.product.three + ) + +// Pattern 3: Check that data is cached, in a key matching the regex + expect(testCacheClient).toHaveCached( + /cached-.*/, + scenario.product.three + ) +``` + + +:::info Serialized Objects in Cache +Remember that the cache only ever contains serialized objects. So if you passed an object like this: +```js +{ + id: 5, + published: new Date('12/10/1995') +} + +``` + +The published key will be serialized and stored as a string. To make testing easier for you, we serialize the object you are passing when you use the `toHaveCached` matcher, before we compare it against the value in the cache. +::: + +### Partial Matching +It can be a little tedious to check that every key in the object you are looking for matches. This is especially true if you have autogenerated values such as `updatedAt` and `cuid` IDs. + +To help with this, we've provided a helper for partial matching! 
+ +```ts +// highlight-next-line +import { partialMatch } from '@redwoodjs/testing/cache' + +scenario('returns all products', async (scenario: StandardScenario) => { + await products() + + // Partial match using the toHaveCached, if you supply a key + expect(testCacheClient).toHaveCached( + /cached-products.*/, + // highlight-next-line + partialMatch([{ name: 'LS50', brand: 'KEF' }]) + ) + + // Or you can use the .contents getter + expect(testCacheClient.contents).toContainEqual( + // check that an array contains an object matching + // highlight-next-line + partialMatch([{ name: 'LS50', brand: 'KEF' }]) + ) +} + +scenario('finds a single product', () = { + await product({id: 5}) + + // You can also check for a partial match of an object + expect(testCacheClient).toHaveCached( + /cached-.*/, + // highlight-start + partialMatch({ + name: 'LS50', + brand: 'KEF' + }) + ) + // highlight-end +}) +``` + +Partial match is just syntactic sugar—underneath it uses Jest's `expect.objectContaining` and `expect.arrayContaining`. + +The `partialMatch` helper takes two forms of arguments: + +- If you supply an object, you are expecting a partial match of that object +- If you supply an array of objects, you are expecting an array containing a partial match of each of the objects + + +:::tip +Note that you cannot use `partialMatch` with toHaveCached without supplying a key! + +```ts +// 🛑 Will never pass! +expect(testCacheClient).toHaveCached(partialMatch({name: 'LS50'})) +``` + +For partial matches, you either have to supply a key to `toHaveCached` or use the `cacheClient.contents` helper. +::: + + +### Strict Matching + +If you'd like stricter checking (i.e. you do not want helpers to automatically serialize/deserialize your _expected_ value), you can use the `.contents` getter in test cache client. Note that the `.contents` helper will still de-serialize the values in your cache (to make it easier to compare), just not the expected value. + +For example: + +```ts + +const expectedValue = { + // Note that this is a date 👇 + publishDate: new Date('12/10/1988'), + title: 'A book from the eighties', + id: 1988 +} + +// ✅ will pass, because we will serialize the publishedDate for you +expect(testCacheClient).toHaveCached(expectedValue) + + +// 🛑 won't pass, because publishDate in cache is a string, but you supplied a Date object +expect(testCacheClient.contents).toContainEqual(expectedValue) + +// ✅ will pass, because you serialized the date +expect(testCacheClient.contents).toContainEqual({ + ...expectedValue, + publishDate: expectedValue.publishDate.toISOString() +}) + +// And if you wanted to view the raw contents of the cache +console.log(testCacheClient.storage) +``` + +This is mainly helpful when you are testing for a very specific value, or have edgecases in how the serialization/deserialization works in the cache. + + +## Testing Mailer + +If your project uses [RedwoodJS Mailer](./mailer.md) to send emails, you can [also write tests](./mailer.md#testing) to make sure that email: + +* is sent to an sandbox inbox +* renders properly +* sets the expected to, from, cc, bcc, subject attributes based on the email sending logic +* checks that the html and text content is set correctly + +Since these tests send mail to a sandbox inbox, you can be confident that no emails accidentally get sent into the wild as part of your test or CI runs. + + +## Wrapping Up + +So that's the world of testing according to Redwood. Did we miss anything? Can we make it even more awesome? 
Stop by [the community](https://community.redwoodjs.com) and ask questions, or if you've thought of a way to make this doc even better then [open a PR](https://github.com/redwoodjs/redwoodjs.com/pulls). + +Now go out and create (and test!) something amazing! diff --git a/docs/versioned_docs/version-7.0/toast-notifications.md b/docs/versioned_docs/version-7.0/toast-notifications.md new file mode 100644 index 000000000000..9b22ee524bd4 --- /dev/null +++ b/docs/versioned_docs/version-7.0/toast-notifications.md @@ -0,0 +1,133 @@ +--- +description: Toast notifications with react-hot-toast +--- + +# Toast Notifications + +Did you know that those little popup notifications that you sometimes see at the top of a page after you've performed an action are affectionately known as "toast" notifications? +Because they pop up like a piece of toast from a toaster! + +![Example toast animation](https://user-images.githubusercontent.com/300/110032806-71024680-7ced-11eb-8d69-7f462929815e.gif) + +Redwood supports these notifications out of the box thanks to the [react-hot-toast](https://react-hot-toast.com/) package. +We'll refer you to their [docs](https://react-hot-toast.com/docs) since they're very thorough, but here's enough to get you going. + +### Add the `Toaster` Component + +To render toast notifications, start by adding the `Toaster` component. +It's usually better to add it at the App or Layout-level than the Page: + +```jsx title="web/src/layouts/MainLayout/MainLayout.js" +// highlight-next-line +import { Toaster } from '@redwoodjs/web/toast' + +const MainLayout = ({ children }) => { + return ( + <> + // highlight-next-line + <Toaster /> + <main>{children}</main> + </> + ) +} + +export default MainLayout +``` + +### Call the `toast` function + +To render a basic toast notification with default styles, call the `toast` function: + +```jsx title="web/src/layouts/MainLayout/MainLayout.js" +import { toast } from '@redwoodjs/web/toast' + +// ... + +const PostForm = () => { + const [create, { loading, error }] = useMutation(CREATE_POST_MUTATION) + + const onSubmit = async (data) => { + try { + await create({ variables: { input: data }}) + // highlight-next-line + toast('Post created') + } + catch (e) { + // highlight-next-line + toast('Error creating post') + } + } + + return ( + // <Form onSubmit={onSubmit}> ... </Form> + ) +}) + +export default PostForm +``` + +### Call the `toast` variants + +To render a toast notification with default icons and default styles, call the `toast` variants: + +```jsx title="web/src/components/PostForm/PostForm.js" +import { toast } from '@redwoodjs/web/toast' + +// ... + +const PostForm = () => { + const [create, { loading, error }] = useMutation(CREATE_POST_MUTATION, { + onCompleted: () => { + // highlight-next-line + toast.success('Post created') + } + onError: () => { + // highlight-next-line + toast.error('Error creating post') + } + }) + + const onSubmit = (data) => { + create({ variables: { input: data }}) + } + + return ( + // <Form onSubmit={onSubmit}> ... </Form> + ) +}) + +export default PostForm +``` + +or render an async toast by calling the `toast.promise` function: + +```jsx title="web/src/components/PostForm/PostForm.js" +import { toast } from '@redwoodjs/web/toast' + +// ... 
+ +const PostForm = () => { + const [create, { loading, error }] = useMutation(CREATE_POST_MUTATION) + + const onSubmit = (data) => { + // highlight-next-line + toast.promise(create({ variables: { input: data }}), { + loading: 'Creating post...', + success: 'Post created', + error: 'Error creating post', + }) + } + + return ( + // <Form onSubmit={onSubmit}> ... </Form> + ) +}) + +export default PostForm +``` + +:::warning + +You can't use the [onError](https://www.apollographql.com/docs/react/api/react/hooks/#onerror) callback in combination with the `toast.promise` function. + +::: diff --git a/docs/versioned_docs/version-7.0/tutorial/afterword.md b/docs/versioned_docs/version-7.0/tutorial/afterword.md new file mode 100644 index 000000000000..7ba3db0b8c85 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/afterword.md @@ -0,0 +1,28 @@ + +# Afterword + +You made it! Get yourself some ice cream or a slice of pie: you definitely deserve it. + +Will there be a chapters 8+ of the tutorial? We've spent a lot of time getting our features working but not much time with optimization and polish. [Premature optimization is the root of all evil](http://wiki.c2.com/?PrematureOptimization), but once your site is live and you've got real users on it you'll get a sense of what could be faster, prettier or more efficient. That's when time spent optimizing can pay huge dividends. But, discovering the techniques and best practices for those optimizations...that's a whole different story. The kind of story that Redwood loves to help you write! + +So until next time, a bit of wisdom to help combat that next bout of every developer's nemesis, imposter syndrome: + +> _"There is nothing noble in being superior to your fellow man; true nobility is being superior to your former self."_ — Ernest Hemingway + +## What's Next + +Want to add some more features to your app? Check out some of our how to's like [calling to a third party API](../how-to/using-a-third-party-api.md) and [deploying an app without an API at all](../how-to/disable-api-database.md). We've also got lots of [guides](https://redwoodjs.com/docs/index) for more info on Redwood's internals. + +## Roadmap + +Check out our [Roadmap](https://redwoodjs.com/roadmap) to see where we're headed and how we're going to get there. If you're interested in helping with anything you see, just let us know over on the [RedwoodJS Forum](https://community.redwoodjs.com/) and we'll be happy to get you set up. + +## Help Us! + +What do you think of Redwood? Is it the Next Step for JS frameworks? What can it do better? We've got a lot more planned. Want to help us build these upcoming features? + +- [Open a PR](https://github.com/redwoodjs/redwood/pulls) +- [Write some docs](https://redwoodjs.com/docs/introduction) +- [Join the community](https://community.redwoodjs.com) + +Thanks for following along. Now go out and build something amazing! diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter0/what-is-redwood.md b/docs/versioned_docs/version-7.0/tutorial/chapter0/what-is-redwood.md new file mode 100644 index 000000000000..e779cafefdf2 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter0/what-is-redwood.md @@ -0,0 +1,345 @@ +# What is Redwood? + +*What follows is a high-level description of Redwood and how it works. 
If you want to get right to the meat and potatoes of building something, skip ahead to [Chapter 1](../chapter1/prerequisites.md).* + +Redwood is a React framework with lots of pre-installed packages and configuration that makes it easy to build full-stack web applications. + +Now that the elevator pitch is out of the way, what does that actually *mean*? At its core, Redwood is React plus a bunch of stuff that makes your life as a developer easier. Some of that stuff includes: + +* GraphQL +* Prisma +* Jest +* Storybook +* vite +* Babel +* Typescript + +What do we mean when we say a "full-stack web application?" We're talking about your classic web app: a UI that's visible in the browser (the frontend), backed by a server and database (the backend). Until React Server Components came along (more on those later) React had no idea a server and/or database existed: it was up to you to somehow get data into your app. Maybe this was done with a `fetch()` or in a build step which would pre-bake some of the data needed right into your components. However the data got there, it wasn't an ideal solution. + +One of the core principals behind Redwood was that getting data from the backend should be as simple as possible, going so far as to create conventions around it so that retrieving data for display in a component was as easy as adding a couple of lines of code directly into the component itself. Oh and while we're at it, Redwood will automatically show a loading message while waiting for the data, a different state if there's an error, and even a separate message if the data returned from the server is empty (the classic "blank slate"). + +## How a Redwood App Works + +A Redwood app is actually two apps: a frontend (that's the React part) and a backend, which is your server and talks to a database and other third party systems. Your app is technically a monorepo with two top-level directories: `web` containing the frontend code and `api` containing the backend. + +You can start them both with a single command: `yarn redwood dev` + +## The Frontend + +### The Router + +When you open your web app in a browser, React does its thing initializing your app and monitoring the history for changes so that new content can be shown. Redwood features a custom, declarative Router that lets you specify URLs and the requisite pages (just a React component) will be shown. A simple routes file may look something like: + +```jsx +import { Route, Router, Set, PrivateSet } from '@redwoodjs/router' +import ApplicationLayout from 'src/layouts/ApplicationLayout' +import { useAuth } from './auth' + +const Routes = () => { + return ( + <Router useAuth={useAuth}> + <Set wrap={ApplicationLayout}> + <Route path="/login" page={LoginPage} name="login" /> + <Route path="/signup" page={SignupPage} name="signup" /> + <PrivateSet unauthenticated="login"> + <Route path="/dashboard" page={DashboardPage} name="dashboard" /> + <Route path="/products/{sku}" page={ProductsPage} name="products" /> + </PrivateSet> + </Set> + + <Route path="/" page={HomePage} name="home" /> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} +``` + +You can probably get a sense of how all of this works without ever having seen a Redwood route before! Some routes can be marked as `<PrivateSet>` and will not be accessible without being logged in. Others can be wrapped in a "layout" (again, just a React component) to provide common styling shared between pages in your app. 
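+
+Those route names aren't just labels: everywhere else in your app you link to pages by name instead of hardcoding URLs. A quick sketch (the `ProductTeaser` component is purely illustrative):
+
+```jsx
+import { Link, routes, navigate } from '@redwoodjs/router'
+
+const ProductTeaser = ({ sku }) => (
+  <div>
+    {/* Renders a link to /products/{sku}, built from the route named "products" */}
+    <Link to={routes.products({ sku })}>View product</Link>
+
+    {/* Or navigate programmatically, say after a successful form submit */}
+    <button onClick={() => navigate(routes.dashboard())}>Go to dashboard</button>
+  </div>
+)
+
+export default ProductTeaser
+```
+
+If a route's path ever changes, every link built from `routes.products()` picks up the new URL automatically.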
+ +#### Prerender + +If you have content on your page that can be purely static (like public facing marketing-focused pages) you can simply add the `prerender` attribute to your route and that page will be completely rendered (no matter how deeply nested the internal components go) into an HTML page. This page loads instantly, but still contains the JS needed to include React. Once React loads, the page is rehydrated and becomes interactive. + +You can also prerender pages that contain variables pulled from the URL, like the `/products/{sku}` route above. Redwood will [iterate](../../prerender.md#dynamic-routes--route-hooks) through all available skus and generate a page for each. + +This is Redwood's version of static site generation, aka SSG. + +### Authentication + +The `<PrivateSet>` route limits access to users that are authenticated, but how do they authenticate? Redwood includes integrations to many popular third party authentication hosts (including [Auth0](https://auth0.com/), [Supabase](https://supabase.com/docs/guides/auth) and [Clerk](https://clerk.com/)). You can also [host your own auth](https://redwoodjs.com/docs/auth/dbauth), or write your own [custom authentication](https://redwoodjs.com/docs/auth/custom) option. If going self-hosted, we include login, signup, and reset password pages, as well as the option to include TouchID/FaceID and third party biometric readers! + +Once authenticated, how do you know what a user is allowed to do or not do? Redwood includes helpers for [role-based access control](https://redwoodjs.com/docs/how-to/role-based-access-control-rbac) that integrates on both the front- and backend. + +The homepage is accessible *without* being logged in, browsing to `/` will load the `HomePage` page (component) which itself is just composed of more React components, nothing special there. But, what if the homepage, say, displayed some testimonials from the database? Ahh, now things are getting interesting. Here's where Redwood's handpicked selection of technologies start to take the spotlight. + +### GraphQL + +Redwood uses GraphQL as the glue between the front- and backends: whenever you want data from the server/database, you're going to retrieve it via GraphQL. Now, we could have just given you raw access to some GraphQL library and let you make those calls yourself. We use [Apollo Client](https://www.apollographql.com/apollo-client) on the frontend and Apollo provides hooks like [useQuery()](https://www.apollographql.com/tutorials/lift-off-part1/10-the-usequery-hook) and [useMutation()](https://www.apollographql.com/tutorials/lift-off-part4/08-the-usemutation-hook) to retrieve and set data, respectively. But Redwood has a much deeper integration. + +What if you could have a component that was not only responsible for its own display *but even its own data retrieval*? Meaning everything that component needed in order to display itself could all be self-contained. That includes the code to display while the data is loading, or if something goes wrong. These kinds of uber-components are real, and Redwood calls "cells." + +### Cells + +A cell is still just a React component (also called a [single file component](https://www.swyx.io/react-sfcs-here)), it just happens to follow a couple of conventions that make it work as described above: + +1. The name of the file ends in `Cell" +2. The file exports several named components, at the very least one named `QUERY` and another named `Success` +3. 
The file can optionally export several other components, like `Loading`, `Failure` and `Empty`. You can probably guess what those are for! + +So, any time React is about to render a cell, the following lifecycle occurs: + +1. The `Loading` component is displayed +2. A `useQuery()` hook is fired, using the exported `QUERY` +3. Assuming the data returns successfully, the `Success` component is rendered with one of the props being the data returned from `useQuery()` + +As an alternative to step 3, if something went wrong then `Failure` is rendered. If the query returned `null` or an empty array, the `Empty` component is rendered. If you don't export either of those then `Success` will be rendered and it would be up to you to show the error or empty state through conditional code. + +Going back to our testimonals hypothetical, a cell to fetch and display them may look something like: + +```js +export const QUERY = gql` + query GetTestimonials { + testimonials { + id + author + quote + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Failure = ({ error }) => <div>An error occured! {error.message}</div> + +export const Success = ({ testimonials }) => { + return ( + <ul> + {testimonials.map((test) => { + <li key={test.id}>{test.quote} — {test.author}</li> + })} + </ul> + ) +} +``` + +(In this case we don't export `Empty` so that if there aren't any testimonials, that section of the final page won't render anything, not even indicating to the user that something is missing.) + +If you ever create additional clients for your server (a mobile app, perhaps) you'll be giving yourself a huge advantage by using GraphQL from the start. + +Oh, and prerendering also works with cells! At build time, Redwood will start up the GraphQL server and make requests, just as if a user was accessing the pages, rendering the result to plain HTML, ready to be loaded instantly by the browser. + +### Apollo Cache + +The Apollo Client library also intelligently caches the results of that `QUERY` above, and so if the user browses away and returns to the homepage, the `Success` component is now rendered *immediately* from the cache! Simultaneously, the query is made to the server again to see if any data has changed since the cache was populated. If so, the new data is merged into the cache and the component will re-render to show any new testimonials since the last time it was viewed. + +So, you get performance benefits of an instant display of cached data, but with the guarantee that you won't only see stale data: it's constantly being kept in sync with the latest from the server. + +You can also directly manipulate the cache to add or remove entries, or even use it for [state management](https://www.apollographql.com/docs/react/local-state/local-state-management/). + +If you're familiar with GraphQL then you know that on the backend you define the structure of data that GraphQL queries will return with "resolvers." But GraphQL itself doesn't know anything about talking to databases. How does the raw data in the database make it into those resolvers? That's where our next package comes in. + +### Accessibility + +Redwood includes a couple of components to [aid screen readers](https://redwoodjs.com/docs/accessibility) in properly navigating your app. The `<RouteAnnouncement>` component tells a screen reader to read something aloud, even though it isn't visible in the browser. And the `<RouteFocus>` tells a reader to skip verbose navigation options at the top of a page and get to the content. 
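+
+A rough sketch of how these could be used in a page (assuming both components are imported from `@redwoodjs/router`; see the accessibility doc for the exact props):
+
+```jsx
+import { RouteAnnouncement, RouteFocus } from '@redwoodjs/router'
+
+const ContactPage = () => (
+  <>
+    {/* Announced by screen readers when this route renders */}
+    <RouteAnnouncement>Contact page</RouteAnnouncement>
+
+    {/* Gives keyboard and screen reader users a shortcut past the nav */}
+    <RouteFocus>
+      <a href="#main-content">Skip to main content</a>
+    </RouteFocus>
+
+    <main id="main-content">{/* page content */}</main>
+  </>
+)
+
+export default ContactPage
+```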
+ +## The Backend + +Now we're into the backend code in the `api` directory. + +### Prisma + +[Prisma](https://www.prisma.io/) is the package Redwood uses to talk to your database, and provides automated migrations, type-safety and auto-completion in your IDE. Your Redwood app will contain a file called `schema.prisma` that will reflect your current database schema: + +```prisma +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + binaryTargets = "native" +} + +model Testimonial { + id Int @id @default(autoincrement()) + author String @unique + quote String + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} +``` + +Prisma has a couple of command line tools that take changes to this file and turn them into [SQL DDL commands](https://www.sqlshack.com/sql-ddl-getting-started-with-sql-ddl-commands-in-sql-server/) which are executed against your database to update its structure to match. + +#### GraphQL + +Redwood abstracts the concept of GraphQL resolver into a "service." You will generally start with one service function per GraphQL query/mutation. For example, going back to our testimonials example, you would have a service function named `testimonials()` that returns the data for the GraphQL query named `testimonials`. That function uses Prisma to query the database: + +```js +import { db } from 'src/lib/db' + +export const testimonials = () => { + return db.testimonial.findMany() +} +``` + +How does GraphQL know to go here for its `testimonials` resolver? Redwood introduces an "SDL" file, which contains the mapping from GraphQL to the world of services: + +```js +export const schema = gql` + type Testimonial { + id: Int! + author: String! + quote: String! + createdAt: DateTime! + updatedAt: DateTime! + } + + type Query { + testimonials: [Testimonial!] @skipAuth + } +` +``` + +Any definitions listed in the `type Query` section are expected to have a service function with the same name: `testimonials` -> `testimonials()` + +### Security + +Redwood is secure-by-default: no GraphQL request will be fulfilled if made by an unauthenticated user. You can choose to allow access to certain query/mutations to the public, but you'll have to enable that manually for each option. Consider a more complete Testimonials SDL file: + +```js +export const schema = gql` + type Testimonial { + id: Int! + author: String! + quote: String! + createdAt: DateTime! + updatedAt: DateTime! + } + + type CreateTestimonialInput { + author: String! + quote: String! + } + + type Query { + testimonials: [Testimonial!] @skipAuth + } + + type Mutation { + createTestimonal($input: CreateTestimonialInput!): Testimonial! @requireAuth + deleteTestimonal($id: Int!): Testimonial! @requireAuth + } +` +``` + +The `testimonials` query is marked with the [GraphQL directive](../../directives.md) `@skipAuth` meaning that requests here should *not* be limited to authenticated users. However, the critical `createTestimonial` and `deleteTestimonial` mutations are marked `@requireAuth`, and so can only be called by a logged in user. + +Redwood's backend GraphQL server is powered by [GraphQL Yoga](https://the-guild.dev/graphql/yoga-server) and so you have access to everything that makes Yoga secure and performant: rate and depth limiting, logging, directives, and a ton more. 
+ +#### Auth + +If a user is logged in, they will be available in any of your services in the `context` object, available everywhere, all the time: + +```js +import { db } from 'src/lib/db' +import { AuthenticationError } from '@redwoodjs/graphql-server' + +export const createTestimonial = ({ data }) => { + if (context.currentUser.roles.includes('admin')) { + return db.testimonial.create({ data }) + } else { + throw new AuthenticationError("You are not authorized to create testimonials") + } +} +``` + +So `@requireAuth` and `@skipAuth` provide a gate around entire GraphQL queries for authenticated users, but once inside you can be more fine-grained based on who the user actually is. + +## Generators + +Let's take a look at an often overlooked tool in many frameworks' kit: the command line tools. Redwood has focused extensively on these, and one of the most powerful are the "generators." These are used to create files, setup integrations, execute scripts, start the dev server, and more. + +A huge timesaver is generating layouts, pages and cells. There isn't much boilerplate in Redwood's files, but it's still nice to have them built out for, even going so far as creating tests for the bare functionality (more on tests in a minute). + +They also provide easy access to dev tools like Graphiql (for executing GraphQL functions against your server) and Prisma Studio (providing a full GUI for your database). + +![image](https://github.com/redwoodjs/redwood/assets/300/18c928ff-aa34-4f06-941b-69c8035cee61) + +![image](https://github.com/redwoodjs/redwood/assets/300/11f7553e-26a5-4a8f-b618-b9464828cafa) + +Redwood has setup commands for UI libraries like [Tailwind](https://tailwindcss.com/) and [Mantine](https://mantine.dev/), and even provides access to experimental new features, making it easy to enable and disable them on the fly. + +There's even an interactive console that lets you, for example, execute Prisma queries to fetch data from the database. This comes in handy when you want to double check that your query is fetching the data you think it is, without dropping a bunch of `console.log()` statements in your code and reloading the browser. + +## Jest + +Being able to develop a full-stack application this easily is great, but how do you verify that it's working as intended? That's where a great test suite comes in. [Jest](https://jestjs.io/) is a test framework that, as they say, focuses on simplicity. We felt that it was a natural fit with Redwood, and so most files you can generate will include the related test file automatically (pre-filled with some tests, even!). + +Redwood includes several Jest helpers and matchers, allowing you to mock out GraphQL requests, database data, logged in users, and more. + +* [Scenarios](../../testing#scenarios) accept a simple JSON object and pre-populate your database with just that data so it's in a known state that you can test against. +* [Mock Service Worker](../../testing#mock-service-worker) allow you to simulate the response from API calls, including GraphQL +* `mockCurrentUser()` is a helper that allows you to stub out the user that's logged in on either the `web` or `api` codebase, without having to worry about actually passing them through your auth provider + +![image](https://github.com/redwoodjs/redwood/assets/300/614d9867-9765-474f-8b8b-c9217f3f7dcf) + +You can write Jest tests in both the front- and backend of your app. 
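+
+For example, after running `yarn redwood console` you might check a query directly against your dev database (a sketch; `db` here is the same Prisma client your services use):
+
+```js
+// Inside the interactive console
+await db.testimonial.findMany({ where: { quote: { contains: 'Redwood' } } })
+
+// Quick sanity check on record counts
+await db.testimonial.count()
+```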
+ +## Storybook + +While Jest can test your code, [Storybook](https://storybook.js.org/) can be used to catalog and test your UI. They call themselves a "frontend workshop for building UI components in isolation" and we couldn't agree more. Build your components separate from your app, even having props be dynamic while viewing their effects. All you have to do is run `yarn redwood storybook`. + +Redwood adds data mocking for Storybook so that you can display components that would normally be populated with data from GraphQL, but without needing a server running. + +![image](https://github.com/redwoodjs/redwood/assets/300/2753a292-01d4-41b9-9975-edc1f8c1c3ac) + +Storybook is strictly a frontend codebase concern. + +## vite, Babel and Typescript + +Notice at no point above did we say "and then we need to write configuration for this package..." Redwood has done all of that for you and will continue to do that with every release of a new version. We're sure you won't miss spending hours or days trying to add and configure a package in your application. You can eject from our default configs, and add custom code if needed, but most apps will never need to do this: everything Just Works. + +We use vite as our bundler, packaging up the frontend code and automatically code splitting on pages. It also serves the frontend (the `web` directory). The backend (the `api` directory) is compiled by Babel and served with [Fastify](https://fastify.dev/). + +The entire framework is ([strictly](https://redwoodjs.com/docs/typescript/strict-mode)) typed so you can autocomplete all the things in your IDE. + +## Deployment + +Redwood's job doesn't end until your application is deployed to the world! That's why we include deploy commands and config to get your app running on the most popular hosts (whether they are serverless or traditional server infrastructure) including: + +* [AWS](https://aws.amazon.com/) +* [Vercel](https://vercel.com/) +* [Google Cloud](https://cloud.google.com/) +* [Azure](https://azure.microsoft.com/en-us/) +* [Render](https://render.com/) +* [Flightcontrol](https://www.flightcontrol.dev/) +* [Netlify](https://www.netlify.com/) +* anywhere [Docker](https://www.docker.com) is accepted + +You can even deploy to your own server via SSH commands (we call that our [Baremetal](../../deploy/baremetal.md) deploy). + +## Coming Soon + +Redwood is still in active development, and we're working on some [features](https://community.redwoodjs.com/c/experimental-features/25) that are on the cutting edge of the React ecosystem: + +* [React Server Components](https://community.redwoodjs.com/t/react-server-components-rsc/5081) and a new transparent, non-GraphQL API +* [SSR/Streaming](https://community.redwoodjs.com/t/render-modes-ssr-streaming-experimental/4858) +* [Realtime and GraphQL Subscriptions](https://community.redwoodjs.com/t/redwoodjs-realtime/5002) +* [Redwood Studio](https://community.redwoodjs.com/t/redwood-studio-experimental/4771) for getting runtime insights into your project +* [Mailer](https://github.com/redwoodjs/redwood/pull/9058) + +These are just a few highlights from our current [Bighorn Epoch](https://tom.preston-werner.com/2023/05/30/redwoods-next-epoch-all-in-on-rsc). You can see the full list and follow along via our Roadmap project board at [www.redwoodjs.com/roadmap](https://redwoodjs.com/roadmap). + +## Backing + +Redwood was created by Tom Preston-Werner, cofounder of GitHub and projects like Semantic Versioning, TOML, Jekyll, and many more. 
Tom believes that JavaScript applications, specifically full-stack JS applications, are the future of the web, and Redwood has his full support. + +## Updates + +Redwood is constantly being updated and sticks strictly to semantic versioning requirements. You can be sure that there won't be any sudden, breaking changes without a major version revision. Redwood is famous for its [copious release notes](https://community.redwoodjs.com/t/redwood-3-0-0-is-now-available/3989) and comprehensive upgrade guides, and if code changes need to be made to your app, we make every effort to include a codemod script that will make the changes for you. + +## Community + +There's a very active community around Redwood, including a [Discourse forum](https://community.redwoodjs.com/) and [Discord chat](https://discord.gg/redwoodjs), where even members of the core team can be found answering questions. We're building this framework for users like you, and we need your feedback if we're going to be successful! diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter1/file-structure.md b/docs/versioned_docs/version-7.0/tutorial/chapter1/file-structure.md new file mode 100644 index 000000000000..cbf375035bc8 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter1/file-structure.md @@ -0,0 +1,150 @@ +# Redwood File Structure + +Let's take a look at the files and directories that were created for us (config files have been excluded for now): + +:::info + +Don't worry about trying to memorize this directory structure right now, it's just a brief overview to get you oriented. Seeing dozens of files before you've even written a single line of code can be daunting, but there's a great organizational structure here, promise. You can also ignore this all for now and we'll touch upon many of these files and directories as we go. + +::: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +``` +├── api +│ ├── db +│ │ └── schema.prisma +│ └── src +│ ├── directives +│ │ ├── requireAuth +│ │ └── skipAuth +│ ├── functions +│ │ └── graphql.js +│ ├── graphql +│ ├── lib +│ │ ├── auth.js +│ │ ├── db.js +│ │ └── logger.js +│ └── services +│ +├── scripts +│ └── seed.js +│ +└── web + ├── public + │ ├── favicon.png + │ ├── README.md + │ └── robots.txt + └── src + ├── components + ├── layouts + ├── pages + │ ├── FatalErrorPage + │ │ └── FatalErrorPage.jsx + │ └── NotFoundPage + │ └── NotFoundPage.jsx + ├── App.jsx + ├── entry.client.jsx + ├── index.css + ├── index.html + └── Routes.jsx +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +``` +├── api +│ ├── db +│ │ └── schema.prisma +│ └── src +│ ├── directives +│ │ ├── requireAuth +│ │ └── skipAuth +│ ├── functions +│ │ └── graphql.ts +│ ├── graphql +│ ├── lib +│ │ ├── auth.ts +│ │ ├── db.ts +│ │ └── logger.ts +│ └── services +│ +├── scripts +│ └── seed.ts +│ +└── web + ├── public + │ ├── favicon.png + │ ├── README.md + │ └── robots.txt + └── src + ├── components + ├── layouts + ├── pages + │ ├── FatalErrorPage + │ │ └── FatalErrorPage.tsx + │ └── NotFoundPage + │ └── NotFoundPage.tsx + ├── App.tsx + ├── entry.client.tsx + ├── index.css + ├── index.html + └── Routes.tsx +``` + +</TabItem> +</Tabs> + +At the top level we have three directories, `api`, `scripts` and `web`. Redwood separates the backend (`api`) and frontend (`web`) concerns into their own paths in the codebase. ([Yarn refers to these as "workspaces"](https://yarnpkg.com/features/workspaces). 
In Redwood, we refer to them as "sides.") When you add packages going forward you'll need to specify which workspace they should go in. For example (**don't run these commands**, we're just looking at the syntax): + +```bash +yarn workspace web add marked +yarn workspace api add better-fs +``` + +`scripts` is meant to hold any Node scripts you may need to run from the command line that aren't directly related to the api or web sides. The file that's in there, `seed.{js,ts}` is used to populate your database with any data that needs to exist for your app to run at all (maybe an admin user or site configuration). + +### The /api Directory + +Within `api` there are four directories: + +- `db` contains the plumbing for the database: + - `schema.prisma` contains the database schema (tables and columns) + + After we add our first database table, there will also be a SQLite database file named `dev.db` and a directory called `migrations` created for us. `migrations` contains the files that act as snapshots of the database schema changing over time. + +- `dist` contains the compiled code for the api side and can be ignored when developing. + +- `src` contains all your backend code. `api/src` contains five more directories: + - `directives` will contain GraphQL [schema directives](https://www.graphql-tools.com/docs/schema-directives) for controlling access to queries and transforming values. + - `functions` will contain any [lambda functions](https://docs.netlify.com/functions/overview/) your app needs in addition to the `graphql.{js,ts}` file auto-generated by Redwood. This file is required to use the GraphQL API. + - `graphql` contains your GraphQL schema written in a Schema Definition Language (the files will end in `.sdl.{js,ts}`). + - `lib` contains a few files:`auth.{js,ts}` starts as a placeholder for adding auth functionality and has a couple of bare-bones functions in it to start, `db.{js,ts}` instantiates the Prisma database client so we can talk to a database and `logger.{js,ts}` which configures, well, logging. You can use this directory for other code related to the API side that doesn't really belong anywhere else. + - `services` contains business logic related to your data. When you're querying or mutating data for GraphQL (known as **resolvers**), that code ends up here, but in a format that's reusable in other places in your application. + +- And finally `types` contains automatically compiled GraphQL types and can be ignored during development + +That's it for the backend. + +### The /web Directory + +- `public` contains assets not used by React components (they will be copied over unmodified to the final app's root directory): + - `favicon.png` is the icon that goes in a browser tab when your page is open (apps start with the RedwoodJS logo). + - `README.md` explains how, and when, to use the `public` folder for static assets. It also covers best practices for importing assets within components via Vite. You can also [read this README.md file on GitHub](https://github.com/redwoodjs/redwood/blob/main/packages/create-redwood-app/templates/ts/web/public). + - `robots.txt` can be used to control what web indexers are [allowed to do](https://www.robotstxt.org/robotstxt.html). + +- `src` contains several subdirectories: + - `components` contains your traditional React components as well as Redwood _Cells_ (more about those soon). + - `layouts` contain HTML/components that wrap your content and are shared across _Pages_. 
+ - `pages` contain components and are optionally wrapped inside _Layouts_ and are the "landing page" for a given URL (a URL like `/articles/hello-world` will map to one page and `/contact-us` will map to another). There are two pages included in a new app: + - `NotFoundPage.{jsx,tsx}` will be served when no other route is found (see `Routes.{jsx,tsx}` below). + - `FatalErrorPage.{jsx,tsx}` will be rendered when there is an uncaught error that can't be recovered from and would otherwise cause our application to really blow up (normally rendering a blank page). + - `App.{jsx,tsx}` the bootstrapping code to get our Redwood app up and running. + - `entry.client.{jsx,tsx}` is the standard React starting point for our app. + - `index.css` is a good starting place for custom CSS, but there are many options (we like [TailwindCSS](https://tailwindcss.com/) which, believe it or not, may not require you to write any custom CSS for the life of your app!) + - `index.html` is what's first sent to a visitor's browser. It fetches `entry.client.{jsx,tsx}`. + - `Routes.{jsx,tsx}` the route definitions for our app which map a URL to a _Page_. + +We'll dip in and out of these directories and files (and create some new ones) as we work through the tutorial. diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter1/first-page.md b/docs/versioned_docs/version-7.0/tutorial/chapter1/first-page.md new file mode 100644 index 000000000000..877488339e73 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter1/first-page.md @@ -0,0 +1,165 @@ +# Our First Page + +Let's give our users something to look at besides the (awesome) Redwood welcome page (thanks [@alicelovescake](https://github.com/alicelovescake)!). We'll use the `redwood` command line tool to create a page for us: + +```bash +yarn redwood generate page home / +``` + +The command above does four things: + +- Creates `web/src/pages/HomePage/HomePage.{jsx,tsx}`. Redwood takes the name you specified as the first argument after `page` and [PascalCases](https://techterms.com/definition/pascalcase) it, then appends "Page" to construct your new page component. So "home" becomes "HomePage". +- Creates a test file to go along with this new page component at `web/src/pages/HomePage/HomePage.test.{jsx,tsx}` with a single, passing test. You _do_ write tests for your components, _don't you??_ +- Creates a Storybook file for this component at `web/src/pages/HomePage/HomePage.stories.{jsx,tsx}`. Storybook is a wonderful tool for efficiently developing and organizing UI components. (If you want to take a peek ahead, we learn about Storybook in [chapter 5 of the tutorial](../chapter5/storybook.md)). +- Adds a `<Route>` in `web/src/Routes.{jsx,tsx}` that maps the path `/` to the new _HomePage_ page. + +:::info Automatic import of pages in the Routes file + +If you look in Routes you'll notice that we're referencing a component, `HomePage`, that isn't imported anywhere. Redwood automatically imports all pages in the Routes file since we're going to need to reference them all anyway. It saves a potentially huge `import` declaration from cluttering up the routes file. + +::: + +In case you didn't notice, this page is already live (your browser automatically reloaded): + +![Default HomePage render](https://user-images.githubusercontent.com/300/148600239-6a147031-74bb-43e8-b4ef-776b4e2a2cc5.png) + +It's not pretty, but it's a start! Open the page in your editor, change some text and save. Your browser should reload with your new text. 
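+
+If you're curious what the generator gave you, the new page is just a regular React component. It looks roughly like this (the exact boilerplate may differ slightly between Redwood versions):
+
+```jsx
+import { Link, routes } from '@redwoodjs/router'
+import { MetaTags } from '@redwoodjs/web'
+
+const HomePage = () => {
+  return (
+    <>
+      <MetaTags title="Home" description="Home page" />
+
+      <h1>HomePage</h1>
+      <p>
+        Find me in <code>./web/src/pages/HomePage/HomePage.jsx</code>
+      </p>
+      <p>
+        My default route is named <code>home</code>:{' '}
+        <Link to={routes.home()}>HomePage</Link>
+      </p>
+    </>
+  )
+}
+
+export default HomePage
+```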
+ +### Routing + +Open up `web/src/Routes.{jsx,tsx}` and take a look at the route that was created: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +import { Router, Route } from '@redwoodjs/router' + +const Routes = () => { + return ( + <Router> + // highlight-next-line + <Route path="/" page={HomePage} name="home" /> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/Routes.tsx" +import { Router, Route } from '@redwoodjs/router' + +const Routes = () => { + return ( + <Router> + // highlight-next-line + <Route path="/" page={HomePage} name="home" /> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +</Tabs> + +As long as you have a route with path `/`, you'll never see the initial Redwood splash screen again. + +When no route can be found that matches the requested URL, Redwood will render the `NotFoundPage`. + +Try changing the route to something like: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx +<Route path="/hello" page={HomePage} name="home" /> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx +<Route path="/hello" page={HomePage} name="home" /> +``` + +</TabItem> +</Tabs> + +The splash screen is available again at [http://localhost:8910/](http://localhost:8910/), giving you a list of all the available URLs in your app. + +![Redwood Splash Screen](https://user-images.githubusercontent.com/17789536/160120107-1157af8e-4cbd-4ec8-b3aa-8adb28ea6eaf.png) + +Go to `/hello` and you should see the homepage again. + +Change the route path back to `/` before continuing! + +### Simple Styles + +Previous versions of this tutorial had you build everything without any styling, so we could really focus on the code, but let's face it: an unstyled site is pretty ugly. Let's add a really simple stylesheet that will just make things a *little* easier on the eyes as we build out the site. Paste the following into `web/src/index.css`: + +```css title="web/src/index.css" +body { + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; +} +ul { + list-style-type: none; + margin: 1rem 0; + padding: 0; +} +li { + display: inline-block; + margin: 0 1rem 0 0 ; +} +h1 > a { + text-decoration: none; + color: black; +} +button, input, label, textarea { + display: block; + outline: none; +} +label { + margin-top: 1rem; +} +.error { + color: red; +} +input.error, textarea.error { + border: 1px solid red; +} +.form-error { + color: red; + background-color: lavenderblush; + padding: 1rem; + display: inline-block; +} +.form-error ul { + list-style-type: disc; + margin: 1rem; + padding: 1rem; +} +.form-error li { + display: list-item; +} +.flex-between { + display: flex; + justify-content: space-between; +} +.flex-between button { + display: inline; +} +``` + +These styles will switch to whatever your OS's system font is, put a little margin between things, and just generally clean things up. Feel free to tweak it to your liking (or ignore these styles completely and stick with the browser default) but keep in mind that the following screenshots are made against this base stylesheet so your experience may vary. 
+ +![Default homepage with custom styles](https://user-images.githubusercontent.com/300/148600516-f8e048aa-451f-46f0-9749-078d63fe7b07.png) + +Looking better already! diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter1/installation.md b/docs/versioned_docs/version-7.0/tutorial/chapter1/installation.md new file mode 100644 index 000000000000..266664f74687 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter1/installation.md @@ -0,0 +1,55 @@ +# Installation & Starting Development + +We'll use yarn ([yarn](https://yarnpkg.com/getting-started/install) is a requirement) to create the basic structure of our app: + + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```bash +yarn create redwood-app ./redwoodblog +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```bash +yarn create redwood-app --ts ./redwoodblog +``` + +</TabItem> +</Tabs> + +You'll have a new directory `redwoodblog` containing several directories and files. Change to that directory and we'll start the development server: + +```bash +cd redwoodblog +yarn install +yarn redwood dev +``` + +A browser should automatically open to [http://localhost:8910](http://localhost:8910) and you will see the Redwood welcome page: + +![Redwood Welcome Page](https://user-images.githubusercontent.com/300/145314717-431cdb7a-1c45-4aca-9bbc-74df4f05cc3b.png) + +:::tip + +Remembering the port number is as easy as counting: 8-9-10! + +::: + +The splash page gives you links to many resources, but don't get distracted: we've got a job to do! + +### First Commit + +Now that we have the skeleton of our Redwood app in place, it's a good idea to save the current state of the app as your first commit...just in case. + +```bash +git init +git add . +git commit -m 'First commit' +``` + +[git](https://git-scm.com/) is another of those concepts we assume you know, but you *can* complete the tutorial without it. Well, almost: you won't be able to deploy! At the end we'll be deploying to a provider that requires your codebase to be hosted in either [GitHub](https://github.com) or [GitLab](https://gitlab.com). + +If you're not worried about deployment for now, you can go ahead and complete the tutorial without using `git` at all. diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter1/layouts.md b/docs/versioned_docs/version-7.0/tutorial/chapter1/layouts.md new file mode 100644 index 000000000000..4efbd849f81f --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter1/layouts.md @@ -0,0 +1,397 @@ +# Layouts + +One way to solve the duplication of the `<header>` would be to create a `<Header>` component and include it in both `HomePage` and `AboutPage`. That works, but is there a better solution? Ideally there should only be one reference to the `<header>` anywhere in our code. + +When you look at these two pages what do they really care about? They have some content they want to display. They really shouldn't have to care what comes before (like a `<header>`) or after (like a `<footer>`). That's where layouts come in: they wrap a page in a component that then renders the page as its child. The layout can contain any content that's outside the page itself. 
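+In React terms a layout is nothing exotic: it's just a component that renders the shared chrome once and drops whatever it receives as `children` in the middle. Here's a minimal sketch of the idea (the names are made up; we'll generate the real file in a moment):
+
+```jsx
+// A hypothetical layout: shared markup lives here once, and each
+// wrapped page is rendered wherever `children` appears.
+const SomeLayout = ({ children }) => {
+  return (
+    <>
+      <header>{/* logo, nav, etc. */}</header>
+      <main>{children}</main>
+      <footer>{/* copyright, secondary links, etc. */}</footer>
+    </>
+  )
+}
+
+export default SomeLayout
+```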
Conceptually, the final rendered document will be structured something like: + +<img src="https://user-images.githubusercontent.com/300/70486228-dc874500-1aa5-11ea-81d2-eab69eb96ec0.png" alt="Layouts structure diagram" width="300"/> + +Let's create a layout to hold that `<header>`: + +```bash +yarn redwood g layout blog +``` + +:::tip + +From now on we'll use the shorter `g` alias instead of `generate` + +::: + +That created `web/src/layouts/BlogLayout/BlogLayout.{jsx,tsx}` and associated test and stories files. We're calling this the "blog" layout because we may have other layouts at some point in the future (an "admin" layout, perhaps?). + +Cut the `<header>` from both `HomePage` and `AboutPage` and paste it in the layout instead. Let's take out the duplicated `<main>` tag as well: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.jsx" +// highlight-next-line +import { Link, routes } from '@redwoodjs/router' + +const BlogLayout = ({ children }) => { + return ( + // highlight-start + <> + <header> + <h1>Redwood Blog</h1> + <nav> + <ul> + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + // highlight-end + ) +} + +export default BlogLayout +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.tsx" +// highlight-next-line +import { Link, routes } from '@redwoodjs/router' + +type BlogLayoutProps = { + children?: React.ReactNode +} + +const BlogLayout = ({ children }: BlogLayoutProps) => { + return ( + // highlight-start + <> + <header> + <h1>Redwood Blog</h1> + <nav> + <ul> + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + // highlight-end + ) +} + +export default BlogLayout +``` + +</TabItem> +</Tabs> + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/AboutPage/AboutPage.jsx" +import { Link, routes } from '@redwoodjs/router' +import { MetaTags } from '@redwoodjs/web' + +const AboutPage = () => { + return ( + <> + <MetaTags title="About" description="About page" /> + + <p> + This site was created to demonstrate my mastery of Redwood: Look on my + works, ye mighty, and despair! + </p> + <Link to={routes.home()}>Return home</Link> + </> + ) +} + +export default AboutPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/AboutPage/AboutPage.tsx" +import { Link, routes } from '@redwoodjs/router' +import { MetaTags } from '@redwoodjs/web' + +const AboutPage = () => { + return ( + <> + <MetaTags title="About" description="About page" /> + + <p> + This site was created to demonstrate my mastery of Redwood: Look on my + works, ye mighty, and despair! 
+ </p> + <Link to={routes.home()}>Return home</Link> + </> + ) +} + +export default AboutPage +``` + +</TabItem> +</Tabs> + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/HomePage/HomePage.jsx" +import { MetaTags } from '@redwoodjs/web' + +const HomePage = () => { + return ( + <> + <MetaTags title="Home" description="Home page" /> + Home + </> + ) +} + +export default HomePage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/HomePage/HomePage.tsx" +import { MetaTags } from '@redwoodjs/web' + +const HomePage = () => { + return ( + <> + <MetaTags title="Home" description="Home page" /> + Home + </> + ) +} + +export default HomePage +``` + +</TabItem> +</Tabs> + +In `BlogLayout.{jsx,tsx}`, `children` is where the magic will happen. Any page content given to the layout will be rendered here. And now the pages are back to focusing on the content they care about (we can remove the import for `Link` and `routes` from `HomePage` since those are in the Layout instead). + +To actually render our layout we'll need to make a change to our routes files. We'll wrap `HomePage` and `AboutPage` with the `BlogLayout`, using a `<Set>`. Unlike pages, we do actually need an `import` statement for layouts: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +// highlight-start +import { Router, Route, Set } from '@redwoodjs/router' +import BlogLayout from 'src/layouts/BlogLayout' +// highlight-end + +const Routes = () => { + return ( + <Router> + // highlight-start + <Set wrap={BlogLayout}> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + // highlight-end + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/Routes.tsx" +// highlight-start +import { Router, Route, Set } from '@redwoodjs/router' +import BlogLayout from 'src/layouts/BlogLayout' +// highlight-end + +const Routes = () => { + return ( + <Router> + // highlight-start + <Set wrap={BlogLayout}> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + // highlight-end + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +</Tabs> + +:::info The `src` alias + +Notice that the import statement uses `src/layouts/BlogLayout` and not `../src/layouts/BlogLayout` or `./src/layouts/BlogLayout`. Being able to use just `src` is a convenience feature provided by Redwood: `src` is an alias to the `src` path in the current workspace. So if you're working in `web` then `src` points to `web/src` and in `api` it points to `api/src`. + +::: + +Back to the browser (you may need to manually refresh) and you should see...nothing different. But that's good, it means our layout is working! + +:::info Why are things named the way they are? + +You may have noticed some duplication in Redwood's file names. Pages live in a directory called `/pages` and also contain `Page` in their name. Same with Layouts. What's the deal? + +When you have dozens of files open in your editor it's easy to get lost, especially when you have several files with names that are similar or even the same (they happen to be in different directories). Imagine a dozen files named `index.{js,ts}` and then trying to find the one you're looking for in your open tabs! 
We've found that the extra duplication in the names of files is worth the productivity benefit when scanning for a specific open file. + +If you're using the [React Developer Tools](https://chrome.google.com/webstore/detail/react-developer-tools/fmkadmapgofadopljbjfkapdkoienihi?hl=en) plugin this also helps disambiguate when browsing through your component stack: + +<img src="https://user-images.githubusercontent.com/300/145901282-e4b6ec92-8cee-42d0-97ea-1ffe99328e53.png" width="400"/> + +::: + +### Back Home Again + +A couple more `<Link>`s: let's have the title/logo link back to the homepage, and we'll add a nav link to Home as well: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.jsx" +import { Link, routes } from '@redwoodjs/router' + +const BlogLayout = ({ children }) => { + return ( + <> + <header> + // highlight-start + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + // highlight-end + <nav> + <ul> + // highlight-start + <li> + <Link to={routes.home()}>Home</Link> + </li> + // highlight-end + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.tsx" +import { Link, routes } from '@redwoodjs/router' + +type BlogLayoutProps = { + children?: React.ReactNode +} + +const BlogLayout = ({ children }: BlogLayoutProps) => { + return ( + <> + <header> + // highlight-start + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + // highlight-end + <nav> + <ul> + // highlight-start + <li> + <Link to={routes.home()}>Home</Link> + </li> + // highlight-end + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +</Tabs> + +And then we can remove the extra "Return to Home" link (and Link/routes import) that we had on the About page: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/AboutPage/AboutPage.jsx" +import { MetaTags } from '@redwoodjs/web' + +const AboutPage = () => { + return ( + <> + <MetaTags title="About" description="About page" /> + + <p> + This site was created to demonstrate my mastery of Redwood: Look on my + works, ye mighty, and despair! + </p> + </> + ) +} + +export default AboutPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/AboutPage/AboutPage.tsx" +import { MetaTags } from '@redwoodjs/web' + +const AboutPage = () => { + return ( + <> + <MetaTags title="About" description="About page" /> + + <p> + This site was created to demonstrate my mastery of Redwood: Look on my + works, ye mighty, and despair! + </p> + </> + ) +} + +export default AboutPage +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/145901020-1c33bb74-78f9-415e-a8c8-c8873bd6630f.png) + +Now we're getting somewhere! We removed all of that duplication and our header content (logo and navigation) are all in one place. + +Everything we've done so far has been on the web side, which is all in the browser. Let's start getting the backend involved and see what all the hoopla is about GraphQL, Prisma and databases. 
diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter1/prerequisites.md b/docs/versioned_docs/version-7.0/tutorial/chapter1/prerequisites.md new file mode 100644 index 000000000000..2fe986a41784 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter1/prerequisites.md @@ -0,0 +1,70 @@ +# Prerequisites + +<div class="video-container"> + <iframe src="https://www.youtube.com/embed/HJOzmp8oCIQ?rel=0" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture; modestbranding; showinfo=0; fullscreen"></iframe> +</div> + +Redwood is composed of several popular libraries to make full-stack web development easier. Unfortunately, we can't teach all of those technologies from scratch during this tutorial, so we're going to assume you are already familiar with a few core concepts: + +- [React](https://react.dev/) +- [GraphQL](https://graphql.org/) +- [Prisma](https://prisma.io/) +- [Jamstack Deployment](https://jamstack.org/) + +**Don't panic!** You can work through this tutorial without knowing much of anything about these technologies. You may find yourself getting lost in terminology that we don't stop and take the time to explain, but that's okay: just know that the nitty-gritty details of how those technologies work is out there and there will be plenty of time to learn them. As you learn more about them you'll start to see the lines between what Redwood provides on top of the stock implementations of these projects. + +You could definitely learn them all at once, but it will be harder to determine where one ends and another begins, which makes it more difficult to find help once you're past the tutorial and want to dive deeper into one technology or another. Our advice? Make it through the tutorial and then start building something on your own! When you find that what you learned in the tutorial doesn't exactly apply to a feature you're trying to build, Google for where you're stuck ("prisma select only some fields") and you'll be an expert in no time. And don't forget our [Discourse](https://community.redwoodjs.com/) and [Discord](https://discord.gg/jjSYEQd) where you can get help from the creators of the framework, as well as tons of helpful community members. + +### Redwood Versions + +You will want to be on at least version 7.0.0 to complete the tutorial. If this is your first time using Redwood then no worries: the latest version will be installed automatically when you create your app skeleton! + +If you have an existing site created with a prior version, you'll need to upgrade and (most likely) apply code modifications. Follow this two step process: + +1. For _each_ version included in your upgrade, follow the "Code Modifications" section or "Upgrade Guide" of the specific version's Release Notes: + - [Redwood Releases](https://github.com/redwoodjs/redwood/releases) +2. Then upgrade to the latest version. Run the command: + - `yarn redwood upgrade` + +### Node.js and Yarn Versions + +During installation, RedwoodJS checks if your system meets version requirements for Node and Yarn: + +- node: "=20.x" +- yarn: ">=1.22.21" + +If you're using a version of Node or Yarn that's **less** than what's required, _the installation bootstrap will result in an ERROR_. To check, please run the following from your terminal command line: + +```bash +node --version +yarn --version +``` + +Please do upgrade accordingly. Then proceed to the Redwood installation when you're ready! 
+ +:::info Installing Node and Yarn + +There are many ways to install and manage both Node.js and Yarn. If you're installing for the first time, we recommend the following: + +**1. Node.js** +Using the recommended [LTS version from Nodejs.org](https://nodejs.org/en/) is preferred. + +- `nvm` is a great tool for managing multiple versions of Node on one system. It takes a bit more effort to set up and learn, however. Follow the [nvm installation instructions](https://github.com/nvm-sh/nvm#installing-and-updating). (Windows users should go to [nvm-windows](https://github.com/coreybutler/nvm-windows/releases)). For **Mac** users with Homebrew installed, you can alternatively use it to [install `nvm`](https://formulae.brew.sh/formula/nvm). Or, refer to our how to guide [using nvm](../../how-to/using-nvm.md). + +**2. Yarn** +As of Node.js v18+, Node.js ships with a CLI tool called [Corepack](https://nodejs.org/docs/latest-v18.x/api/corepack.html) to manage package managers. All you have to do is enable it, then you'll have Yarn: + +``` +corepack enable +yarn -v +``` + +The version of Yarn will probably be `1.22.21`, but don't worry—in your Redwood project, Corepack will know to use a modern version of Yarn because of the `packageManager` field in the root `package.json`. + +**Windows:** Recommended Development Setup + +- JavaScript development on Windows has specific requirements in addition to Yarn and npm. Follow our simple setup guide: + + [Recommended Windows Development Setup](../../how-to/windows-development-setup.md) + +::: diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter1/second-page.md b/docs/versioned_docs/version-7.0/tutorial/chapter1/second-page.md new file mode 100644 index 000000000000..fa25bc7eb26a --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter1/second-page.md @@ -0,0 +1,186 @@ +# A Second Page and a Link + +Let's create an "About" page for our blog so everyone knows about the geniuses behind this achievement. We'll create another page using `redwood`: + +```bash +yarn redwood generate page about +``` + +Notice that we didn't specify a route path this time. If you leave it off the `redwood generate page` command, Redwood will create a `Route` and give it a path that is the same as the page name you specified, prepended with a slash. In this case it will be `/about`. + +:::info Code-splitting each page + +As you add more pages to your app, you may start to worry that more and more code has to be downloaded by the client on any initial page load. Fear not! Redwood will automatically code-split on each Page, which means that initial page loads can be blazingly fast, and you can create as many Pages as you want without having to worry about impacting overall bundle size. If, however, you do want specific Pages to be included in the main bundle, you can [override the default behavior](../../router.md#not-code-splitting). + +::: + +[http://localhost:8910/about](http://localhost:8910/about) should show our new page: + +![About page](https://user-images.githubusercontent.com/300/145647906-56b02a6c-b92c-40c6-9d37-860584ffaa6b.png) + +But no one's going to find it by manually changing the URL so let's add a link from our homepage to the About page and vice versa. 
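+Before we add any links, it's worth glancing at `web/src/Routes.{jsx,tsx}` to confirm what the generator did. You should see a new route that looks roughly like this (a sketch; its position among your other routes may vary):
+
+```jsx
+<Route path="/about" page={AboutPage} name="about" />
+```
+
+That `name="about"` is what we'll use to build links to the page.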
We'll start by creating a simple header and nav bar at the same time on the HomePage: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/HomePage/HomePage.jsx" +import { Link, routes } from '@redwoodjs/router' +import { MetaTags } from '@redwoodjs/web' + +const HomePage = () => { + return ( + <> + <MetaTags title="Home" description="Home page" /> + + // highlight-start + <header> + <h1>Redwood Blog</h1> + <nav> + <ul> + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main>Home</main> + // highlight-end + </> + ) +} + +export default HomePage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/HomePage/HomePage.tsx" +import { Link, routes } from '@redwoodjs/router' +import { MetaTags } from '@redwoodjs/web' + +const HomePage = () => { + return ( + <> + <MetaTags title="Home" description="Home page" /> + + // highlight-start + <header> + <h1>Redwood Blog</h1> + <nav> + <ul> + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main>Home</main> + // highlight-end + </> + ) +} + +export default HomePage +``` + +</TabItem> +</Tabs> + +Let's point out a few things here: + +- Redwood loves [Function Components](https://www.robinwieruch.de/react-function-component). We'll make extensive use of [React Hooks](https://react.dev/reference/react) as we go and these are only enabled in function components. Now that Redwood is on React 18, we discourage using class components since they won't be able to take advantage of React's concurrent rendering features. +- Redwood's `<Link>` tag, in its most basic usage, takes a single `to` attribute. That `to` attribute calls a [_named route function_](../../router.md#link-and-named-route-functions) to generate the correct URL. The function has the same name as the `name` attribute on the `<Route>`: + + `<Route path="/about" page={AboutPage} name="about" />` + + If you don't like the name or path that `redwood generate` created for your route, feel free to change it in `Routes.{jsx,tsx}`! Named routes are awesome because if you ever change the path associated with a route (like going from `/about` to `/about-us`), you need only change it in `Routes.{jsx,tsx}` and every link using a named route function (`routes.about()`) will still point to the correct place! You can also pass a string to the `to` prop (`to="/about"`), but now if your path ever changed you would need to find and replace every instance of `/about` to `/about-us`. + +### Back Home + +Once we get to the About page we don't have any way to get back so let's add a link there as well: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/AboutPage/AboutPage.jsx" +import { Link, routes } from '@redwoodjs/router' +import { MetaTags } from '@redwoodjs/web' + +const AboutPage = () => { + return ( + <> + <MetaTags title="About" description="About page" /> + + // highlight-start + <header> + <h1>Redwood Blog</h1> + <nav> + <ul> + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main> + <p> + This site was created to demonstrate my mastery of Redwood: Look on my + works, ye mighty, and despair! 
+ </p> + <Link to={routes.home()}>Return home</Link> + </main> + // highlight-end + </> + ) +} + +export default AboutPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/AboutPage/AboutPage.tsx" +import { Link, routes } from '@redwoodjs/router' +import { MetaTags } from '@redwoodjs/web' + +const AboutPage = () => { + return ( + <> + <MetaTags title="About" description="About page" /> + + // highlight-start + <header> + <h1>Redwood Blog</h1> + <nav> + <ul> + <li> + <Link to={routes.about()}>About</Link> + </li> + </ul> + </nav> + </header> + <main> + <p> + This site was created to demonstrate my mastery of Redwood: Look on my + works, ye mighty, and despair! + </p> + <Link to={routes.home()}>Return home</Link> + </main> + // highlight-end + </> + ) +} + +export default AboutPage +``` + +</TabItem> +</Tabs> + +Great! Try that out in the browser and verify that you can get back and forth. + +![image](https://user-images.githubusercontent.com/300/145899850-2906c2e3-4ec1-4f8a-9c95-e43b0f7da73f.png) + +As a world-class developer you probably saw that copy-and-pasted `<header>` and gasped in disgust. We feel you. That's why Redwood has a little something called _Layouts_. diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter2/cells.md b/docs/versioned_docs/version-7.0/tutorial/chapter2/cells.md new file mode 100644 index 000000000000..e6de453bd3c7 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter2/cells.md @@ -0,0 +1,672 @@ +# Cells + +The features we listed at the end of the last page (loading state, error messaging, blank slate text) are common in most web apps. We wanted to see if there was something we could do to make developers' lives easier when it comes to adding them to a typical component. We think we've come up with something to help. We call them _Cells_. Cells provide a simpler and more declarative approach to data fetching. ([Read the full documentation about Cells](../../cells.md).) + +In addition to these states, cells are also responsible for their own data fetching. This means that rather than fetching data in some parent component and then passing props down to the child components that need them, a cell is completely self-contained and fetches and displays its own data! Let's add one to our blog to get a feel for how they work. + +When you create a cell you export several specially named constants and then Redwood takes it from there. 
A typical cell may look something like: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx +export const QUERY = gql` + query FindPosts { + posts { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>No posts yet!</div> + +export const Failure = ({ error }) => ( + <div>Error loading posts: {error.message}</div> +) + +export const Success = ({ posts }) => { + return posts.map((post) => ( + <article key={post.id}> + <h2>{post.title}</h2> + <div>{post.body}</div> + </article> + )) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx +import type { FindPosts } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query FindPosts { + posts { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>No posts yet!</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div>Error loading posts: {error.message}</div> +) + +export const Success = ({ posts }: CellSuccessProps<FindPosts>) => { + return posts.map((post) => ( + <article key={post.id}> + <h2>{post.title}</h2> + <div>{post.body}</div> + </article> + )) +} +``` + +</TabItem> +</Tabs> + +When React renders this component, Redwood will perform the `QUERY` and display the `Loading` component until a response is received. + +Once the query returns, it will display one of three states: + - If there was an error, the `Failure` component + - If the data return is empty (`null` or empty array), the `Empty` component + - Otherwise, the `Success` component + +There are also some lifecycle helpers like `beforeQuery` (for manipulating any props before being given to the `QUERY`) and `afterQuery` (for manipulating the data returned from GraphQL but before being sent to the `Success` component). + +The minimum you need for a cell are the `QUERY` and `Success` exports. If you don't export an `Empty` component, empty results will be sent to your `Success` component. If you don't provide a `Failure` component, you'll get error output sent to the console. + +A guideline for when to use cells is if your component needs some data from the database or other service that may be delayed in responding. Let Redwood worry about juggling what is displayed when and you can focus on the happy path of the final, rendered component populated with data. + + +<ShowForTs> + +:::tip Wait... what are those types? + +Redwood comes with some built-in utility types. You can see two of them in the example above: `CellSuccessProps` and `CellFailureProps`. Read more about them [here](typescript/utility-types.md). + +Also notice the `FindPosts` type imported from `types/graphql`. This and other types are generated for you automatically—when you have the dev server running—based on the GraphQL query in your Cell. More about generated types [here](typescript/generated-types.md). + +::: + +</ShowForTs> + +### Our First Cell + +Usually in a blog the homepage will display a list of recent posts. This list is a perfect candidate for our first cell. + +:::info Wait, don't we already have a home page? + +We do, but you will generally want to use a *cell* when you need data from the database. A best practice for Redwood is to create a Page for each unique URL your app has, but that you fetch and display data in Cells. So the existing HomePage will render this new cell as a child. 
+ +::: + +As you'll see repeatedly going forward, Redwood has a generator for this feature! Let's call this the "Articles" cell, since "Posts" was already used by our scaffold generator, and although the names won't clash (the scaffold files were created in the `Post` directory), it will be easier to keep them straight in our heads if the names are fairly different from each other. We're going to be showing multiple things, so we'll use the plural version "Articles," rather than "Article": + +```bash +yarn rw g cell Articles +``` + +This command will result in a new file at `/web/src/components/ArticlesCell/ArticlesCell.{jsx,tsx}` (and `test.{jsx,tsx}` `mock.{js,ts}` and `stories.{jsx,tsx}` files—more on those in [chapter 5 of the tutorial](../chapter5/storybook.md)!). This file will contain some boilerplate to get you started: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +export const QUERY = gql` + query ArticlesQuery { + articles { + id + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ articles }) => { + return ( + <ul> + {articles.map((item) => { + return <li key={item.id}>{JSON.stringify(item)}</li> + })} + </ul> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +import type { ArticlesQuery } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query ArticlesQuery { + articles { + id + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { + return ( + <ul> + {articles.map((item) => { + return <li key={item.id}>{JSON.stringify(item)}</li> + })} + </ul> + ) +} +``` + +</TabItem> +</Tabs> + +:::info Indicating Multiplicity to the Cell Generator + +When generating a cell you can use any case you'd like and Redwood will do the right thing when it comes to naming. These will all create the same filename (`web/src/components/BlogArticlesCell/BlogArticlesCell.{jsx,tsx}`): + +```bash +yarn rw g cell blog_articles +yarn rw g cell blog-articles +yarn rw g cell blogArticles +yarn rw g cell BlogArticles +``` + +You will need _some_ kind of indication that you're using more than one word: either snake_case (`blog_articles`), kebab-case (`blog-articles`), camelCase (`blogArticles`) or PascalCase (`BlogArticles`). + +Calling `yarn redwood g cell blogarticles` (without any indication that we're using two words) will generate a file at `web/src/components/BlogarticlesCell/BlogarticlesCell.{jsx,tsx}`. + +::: + +To get you off and running as quickly as possible the generator assumes you've got a root GraphQL query named the same thing as your cell and gives you the minimum query needed to get something out of the database. 
In this case the query is named `articles`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + articles { + id + } + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + articles { + id + } + } +` +``` + +</TabItem> +</Tabs> + +However, this is not a valid query name for our existing Posts SDL (`api/src/graphql/posts.sdl.{js,ts}`) and Service (`api/src/services/posts/posts.{js,ts}`). (To see where these files come from, go back to the [Creating a Post Editor section](getting-dynamic.md#creating-a-post-editor) in the *Getting Dynamic* part.) Redwood names the query elements after the cell itself for convenience (more often than not you'll be creating a cell for a specific model), but in this case our cell name doesn't match our model name so we'll need to make some manual tweaks. + +We'll have to rename them to `posts` in both the query name and in the prop name in `Success`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + posts { + id + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +// highlight-next-line +export const Success = ({ posts }) => { + return ( + <ul> + // highlight-next-line + {posts.map((item) => { + return <li key={item.id}>{JSON.stringify(item)}</li> + })} + </ul> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +import type { ArticlesQuery } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + posts { + id + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +// highlight-next-line +export const Success = ({ posts }: CellSuccessProps<ArticlesQuery>) => { + return ( + <ul> + // highlight-next-line + {posts.map((item) => { + return <li key={item.id}>{JSON.stringify(item)}</li> + })} + </ul> + ) +} +``` + +</TabItem> +</Tabs> + +<ShowForTs> + +:::tip Using generated types + +At this point, you might see an error in your Cell while trying to import from `types/graphql`: "The type ArticlesQuery does not exist" + +When you have the dev server (via `yarn rw dev`) running, the CLI watches files for changes and triggers type generation automatically, but you can trigger it manually too by running: + +```bash +yarn rw g types +``` + +This looks at your Cell's `QUERY` and—as long as it's valid—tries to automatically create a TypeScript type for you to use in your code. 
+ +::: + +</ShowForTs> + +Let's plug this cell into our `HomePage` and see what happens: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/HomePage/HomePage.jsx" +import { MetaTags } from '@redwoodjs/web' + +// highlight-next-line +import ArticlesCell from 'src/components/ArticlesCell' + +const HomePage = () => { + return ( + <> + <MetaTags title="Home" description="Home page" /> + // highlight-next-line + <ArticlesCell /> + </> + ) +} + +export default HomePage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/HomePage/HomePage.tsx" +import { MetaTags } from '@redwoodjs/web' + +// highlight-next-line +import ArticlesCell from 'src/components/ArticlesCell' + +const HomePage = () => { + return ( + <> + <MetaTags title="Home" description="Home page" /> + // highlight-next-line + <ArticlesCell /> + </> + ) +} + +export default HomePage +``` + +</TabItem> +</Tabs> + +The browser should actually show the `id` and a GraphQL-specific `__typename` properties for any posts in the database. If you just see "Empty" then return to the scaffold we created [last time](getting-dynamic.md#creating-a-post-editor) and add a couple. Neat! + +<img src="https://user-images.githubusercontent.com/300/145910525-6a9814d1-0808-4f7e-aeab-303bd5dbac5e.png" alt="Showing articles in the database" /> + +:::info + +**In the `Success` component, where did `posts` come from?** + +In the `QUERY` statement, the query we're calling is `posts`. Whatever the name of this query is, that's the name of the prop that will be available in `Success` with your data. + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + posts { + id + } + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + posts { + id + } + } +` +``` + +</TabItem> +</Tabs> + +You can also alias the name of the variable containing the result of the GraphQL query, and that will be the name of the prop: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + articles: posts { + id + } + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + articles: posts { + id + } + } +` +``` + +</TabItem> +</Tabs> + +Now `articles` will be available in `Success` instead of `posts`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript +export const Success = ({ articles }) => { ... } +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { ... 
} +``` + +</TabItem> +</Tabs> + +::: + +In fact, let's use the aforementioned alias so that the name of our cell, and the data we're iterating over, is consistent: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + articles: posts { + id + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +// highlight-next-line +export const Success = ({ articles }) => { + return ( + <ul> + // highlight-next-line + {articles.map((item) => { + return <li key={item.id}>{JSON.stringify(item)}</li> + })} + </ul> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +export const QUERY = gql` + query ArticlesQuery { + // highlight-next-line + articles: posts { + id + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +// highlight-next-line +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { + return ( + <ul> + // highlight-next-line + {articles.map((item) => { + return <li key={item.id}>{JSON.stringify(item)}</li> + })} + </ul> + ) +} +``` + +</TabItem> +</Tabs> + +In addition to the `id` that was added to the `query` by the generator, let's get the `title`, `body`, and `createdAt` values as well: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="web/src/components/ArticlesCell/ArticlesCell.jsx" +export const QUERY = gql` + query ArticlesQuery { + articles: posts { + id + // highlight-start + title + body + createdAt + // highlight-end + } + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +export const QUERY = gql` + query ArticlesQuery { + articles: posts { + id + // highlight-start + title + body + createdAt + // highlight-end + } + } +` +``` + +</TabItem> +</Tabs> + +The page should now show a dump of all the data you created for any blog posts you scaffolded: + +<img src="https://user-images.githubusercontent.com/300/145911009-b83fd07f-0412-489c-a088-4e89faceea1c.png" alt="Articles with all DB values" /> + +Now we're in the realm of good ol' React components, so just build out the `Success` component to display the blog post in a nicer format: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +export const Success = ({ articles }) => { + return ( + // highlight-start + <> + {articles.map((article) => ( + <article key={article.id}> + <header> + <h2>{article.title}</h2> + </header> + <p>{article.body}</p> + <div>Posted at: {article.createdAt}</div> + </article> + ))} + </> + // highlight-end + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { + return ( + // highlight-start + <> + {articles.map((article) => ( + <article key={article.id}> + <header> + <h2>{article.title}</h2> + </header> + <p>{article.body}</p> + <div>Posted at: 
{article.createdAt}</div> + </article> + ))} + </> + // highlight-end + ) +} +``` + +</TabItem> +</Tabs> + +And just like that we have a blog! It may be the most basic blog that ever graced the internet, but it's something! You can create/edit/delete posts and the world can view them on the homepage. (Don't worry, we've got more features to add.) + +![Nicely formatted blog articles](https://user-images.githubusercontent.com/300/145911342-b3a4bb44-e635-4bc5-8df7-a824661b2714.png) + +### Summary + +To recap, what did we actually do to get this far? + +1. Generate the homepage +2. Generate the blog layout +3. Define the database schema +4. Run migrations to update the database and create a table +5. Scaffold a CRUD interface to the database table +6. Create a cell to load the data and take care of loading/empty/failure/success states +7. Add the cell to the page + +The last few steps will become a standard lifecycle of new features as you build a Redwood app. + +So far, other than a little HTML, we haven't had to do much by hand. And we especially didn't have to write a bunch of plumbing just to move data from one place to another. It makes web development a little more enjoyable, don't you think? + +We're going to add some more features to our app, but first let's take a detour to learn about how Redwood accesses our database and what these SDL and services files are for. diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter2/getting-dynamic.md b/docs/versioned_docs/version-7.0/tutorial/chapter2/getting-dynamic.md new file mode 100644 index 000000000000..c67b0712b314 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter2/getting-dynamic.md @@ -0,0 +1,203 @@ +# Getting Dynamic + +<div class="video-container"> + <iframe src="https://www.youtube.com/embed/cb_PseqpoG8?rel=0" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture; modestbranding; showinfo=0; fullscreen"></iframe> +</div> + +These two pages are great and all but where are the actual blog posts in this blog? Let's work on those next. + +For the purposes of our tutorial we're going to get our blog posts from a database. Because relational databases are still the workhorses of many complex (and not-so-complex) web applications, we've made SQL access first-class. For Redwood apps, it all starts with the schema. + +### Creating the Database Schema + +We need to decide what data we'll need for a blog post. We'll expand on this at some point, but at a minimum we'll want to start with: + +- `id` the unique identifier for this blog post (all of our database tables will have one of these) +- `title` something click-baity like "Top 10 JavaScript Frameworks Named After Trees—You Won't Believe Number 4!" +- `body` the actual content of the blog post +- `createdAt` a timestamp of when this record was created in the database + +We use [Prisma](https://www.prisma.io/) to talk to the database. Prisma has another library called [Migrate](https://www.prisma.io/docs/concepts/components/prisma-migrate) that lets us update the database's schema in a predictable way and snapshot each of those changes. Each change is called a _migration_ and Migrate will create one when we make changes to our schema. + +First let's define the data structure for a post in the database. Open up `api/db/schema.prisma` and add the definition of our Post table (remove any "sample" models that are present in the file, like the `UserExample` model). 
Once you're done, the entire schema file should look like: + +```javascript title="api/db/schema.prisma" +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + binaryTargets = "native" +} + +// highlight-start +model Post { + id Int @id @default(autoincrement()) + title String + body String + createdAt DateTime @default(now()) +} +// highlight-end +``` + +This says that we want a table called `Post` and it should have: + +- An `id` column of type `Int` lets Prisma know this is the column it should use as the `@id` (for it to create relationships to other tables) and that the `@default` value should be Prisma's special `autoincrement()` method letting it know that the DB should set it automatically when new records are created +- A `title` field that will contain a `String` +- A `body` field that will contain a `String` +- A `createdAt` field that will be a `DateTime` and will `@default` to `now()` when we create a new record (so we don't have to set the time manually in our app, the database will do it for us) + +:::info Integer vs. String IDs + +For the tutorial we're keeping things simple and using an integer for our ID column. Some apps may want to use a CUID or a UUID, which Prisma supports. In that case you would use `String` for the datatype instead of `Int` and use `cuid()` or `uuid()` instead of `autoincrement()`: + +`id String @id @default(cuid())` + +Integers make for nicer URLs like https://redwoodblog.com/posts/123 instead of https://redwoodblog.com/posts/eebb026c-b661-42fe-93bf-f1a373421a13. + +Take a look at the [official Prisma documentation](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-schema/data-model#defining-an-id-field) for more on ID fields. + +::: + +### Migrations + +Now we'll want to snapshot the schema changes as a migration: + +```bash +yarn rw prisma migrate dev +``` + +:::tip + +From now on we'll use the shorter `rw` alias instead of the full `redwood` argument. + +::: + +You'll be prompted to give this migration a name. Something that describes what it does is ideal, so how about "create post" (without the quotes, of course). This is for your own benefit—neither Redwood nor Prisma care about the migration's name, it's just a reference when looking through old migrations and trying to find when you created or modified something specific. + +After the command completes you'll see a new subdirectory created under `api/db/migrations` that has a timestamp and the name you gave the migration. It will contain a single file named `migration.sql` that contains the SQL necessary to bring the database structure up-to-date with whatever `schema.prisma` looked like at the time the migration was created. So, you always have a single `schema.prisma` file that describes what the database structure should look like right *now* and the migrations trace the history of the changes that took place to get to the current state. It's kind of like version control for your database structure, which can be pretty handy. + +In addition to creating the migration file, the above command will also execute the SQL against the database, which "applies" the migration. The final result is a new database table called `Post` with the fields we defined above. + +### Prisma Studio + +A database is a pretty abstract thing: where's the data? What's it look like? How can I access it without creating a UI in my web app? 
Prisma provides a tool called [Studio](https://www.prisma.io/studio) which provides a nice web app view into your database: + +![image](https://user-images.githubusercontent.com/300/145903848-2615027c-dea1-4aff-bc11-02f03ba68de0.png) + +(Ours won't have any data there yet.) To open Prisma Studio, run the command: + +```bash +yarn rw prisma studio +``` + +A new browser should open to [http://localhost:5555](http://localhost:5555) and now you can view and manipulate data in the database directly! + +![image](https://user-images.githubusercontent.com/300/148606893-8d899ce7-4996-4f5e-a7f5-7c8c8483860c.png) + +Click on "Post" and you'll see an empty database table. Let's have our app start putting some posts in there! + +### Creating a Post Editor + +We haven't decided on the look and feel of our site yet, but wouldn't it be amazing if we could play around with posts without having to build a bunch of pages that we'll probably throw away once the design team gets back to us? As you can imagine, we wouldn't have thrown around this scenario unless Redwood had a solution! + +Let's generate everything we need to perform all the CRUD (Create, Retrieve, Update, Delete) actions on posts so we can not only verify that we've got the right fields in the database, but that it will let us get some sample posts in there so we can start laying out our pages and see real content. Redwood has a *generator* for just this occasion: + +```bash +yarn rw g scaffold post +``` + +Let's point the browser to [http://localhost:8910/posts](http://localhost:8910/posts) and see what we have: + +<img src="https://user-images.githubusercontent.com/300/73027952-53c03080-3de9-11ea-8f5b-d62a3676bbef.png" /> + +Well that's barely more than we got when we generated a page. What happens if we click that "New Post" button? + +<img src="https://user-images.githubusercontent.com/300/73028004-72262c00-3de9-11ea-8924-66d1cc1fceb6.png" /> + +Okay, now we're getting somewhere. Fill in the title and body and click "Save". + +<img src="https://user-images.githubusercontent.com/300/73028757-08a71d00-3deb-11ea-8813-046c8479b439.png" /> + +Did we just create a post in the database? And then show that post here on this page? Yup! Try creating another: + +<img src="https://user-images.githubusercontent.com/300/73028839-312f1700-3deb-11ea-8e83-0012a3cf689d.png" /> + +But what if we click "Edit" on one of those posts? + +<img src="https://user-images.githubusercontent.com/300/73031307-9802ff00-3df0-11ea-9dc1-ea9af8f21890.png" /> + +Okay but what if we click "Delete"? + +<img src="https://user-images.githubusercontent.com/300/73031339-aea95600-3df0-11ea-9d58-475d9ef43988.png" /> + +So, Redwood just created all the pages, components and services necessary to perform all CRUD actions on our posts table. No need to even open Prisma Studio or login through a terminal window and write SQL from scratch. Redwood calls these _scaffolds_. + +:::warning + +If you head back to VSCode at some point and get a notice in one of the generated Post cells about `Cannot query "posts" on type "Query"` don't worry: we've seen this from time to time on some systems. There are two easy fixes: + +1. Run `yarn rw g types` in a terminal +2. 
Reload the GraphQL engine in VSCode: open the Command Palette (Cmd+Shift+P for Mac, Ctrl+Shift+P for Windows) and find "VSCode GraphQL: Manual Restart" + +::: + +Here's what happened when we ran that `yarn rw g scaffold post` command: + +- Created several _pages_ in `web/src/pages/Post`: + - `EditPostPage` for editing a post + - `NewPostPage` for creating a new post + - `PostPage` for showing the detail of a post + - `PostsPage` for listing all the posts +- Created a _layout_ file in `web/src/layouts/ScaffoldLayout/ScaffoldLayout.{jsx,tsx}` that serves as a container for pages with common elements like page heading and "New Posts" button +- Created routes wrapped in the `Set` component with the layout as `ScaffoldLayout` for those pages in `web/src/Routes.{jsx,tsx}` +- Created three _cells_ in `web/src/components/Post`: + - `EditPostCell` gets the post to edit in the database + - `PostCell` gets the post to display + - `PostsCell` gets all the posts +- Created four _components_, also in `web/src/components/Post`: + - `NewPost` displays the form for creating a new post + - `Post` displays a single post + - `PostForm` the actual form used by both the New and Edit components + - `Posts` displays the table of all posts +- Added an _SDL_ file to define several GraphQL queries and mutations in `api/src/graphql/posts.sdl.{jsx,ts}` +- Added a _services_ file in `api/src/services/posts/posts.{js,ts}` that makes the Prisma client calls to get data in and out of the database + +Pages and components/cells are nicely contained in `Post` directories to keep them organized while the layout is at the top level since there's only one of them. + +Whew! That may seem like a lot of stuff but we wanted to follow best-practices and separate out common functionality into individual components, just like you'd do in a real app. Sure we could have crammed all of this functionality into a single component, but we wanted these scaffolds to set an example of good development habits: we have to practice what we preach! + +:::info Generator Naming Conventions + +You'll notice that some of the generated parts have plural names and some have singular. This convention is borrowed from Ruby on Rails which uses a more "human" naming convention: if you're dealing with multiple of something (like the list of all posts) it will be plural. If you're only dealing with a single something (like creating a new post) it will be singular. It sounds natural when speaking, too: "show me a list of all the posts" and "I'm going to create a new post." + +As far as the generators are concerned: + +- Services filenames are always plural. +- The methods in the services will be singular or plural depending on if they are expected to return multiple posts or a single post (`posts` vs. `createPost`). +- SDL filenames are plural. +- Pages that come with the scaffolds are plural or singular depending on whether they deal with many or one post. When using the `page` generator it will stick with whatever name you give on the command line. +- Layouts use the name you give them on the command line. +- Components and cells, like pages, will be plural or singular depending on context when created by the scaffold generator, otherwise they'll use the given name on the command line. +- Route names for scaffolded pages are singular or plural, the same as the pages they're routing to, otherwise they are identical to the name of the page you generated. + +Also note that it's the model name part that's singular or plural, not the whole word. 
So it's `PostsCell` and `PostsPage`, not `PostCells` or `PostPages`. + +You don't have to follow this convention once you start creating your own parts but we recommend doing so. The Ruby on Rails community has come to love this nomenclature even though many people complained when first exposed to it! + +::: + +### Creating a Blog Homepage + +We could start replacing these pages one by one as we settle on a look and feel for our blog, but do we need to? The public facing site won't let viewers create, edit or delete posts, so there's no reason to re-create the wheel or update these pages with a look and feel that matches the public facing site. Why don't we keep these as our admin pages and create new ones for the public facing site. + +Let's think about what the general public can do and that will inform what pages we need to build: + +1. View a list of posts (without links to edit/delete) +2. View a single post + +Starting with #1, we already have a `HomePage` which would be a logical place to view the list of posts, so let's just add the posts to the existing page. We need to get the content from the database and we don't want the user to just see a blank screen in the meantime (depending on network conditions, server location, etc), so we'll want to show some kind of loading message or animation. And if there's an error retrieving the data we should handle that as well. And what about when we open source this blog engine and someone puts it live without any content in the database? It'd be nice if there was some kind of blank slate message until their first post is created. + +Oh boy, our first page with data and we already have to worry about loading states, errors, and blank slates...or do we? diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter2/routing-params.md b/docs/versioned_docs/version-7.0/tutorial/chapter2/routing-params.md new file mode 100644 index 000000000000..ff502bb040ac --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter2/routing-params.md @@ -0,0 +1,828 @@ +# Routing Params + +Now that we have our homepage listing all the posts, let's build the "detail" page—a canonical URL that displays a single post. First we'll generate the page and route: + +```bash +yarn rw g page Article +``` + +Now let's link the title of the post on the homepage to the detail page (and include the `import` for `Link` and `routes`): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +// highlight-next-line +import { Link, routes } from '@redwoodjs/router' + +// QUERY, Loading, Empty and Failure definitions... + +export const Success = ({ articles }) => { + return ( + <> + {articles.map((article) => ( + <article key={article.id}> + <header> + <h2> + // highlight-next-line + <Link to={routes.article()}>{article.title}</Link> + </h2> + </header> + <p>{article.body}</p> + <div>Posted at: {article.createdAt}</div> + </article> + ))} + </> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +// highlight-next-line +import { Link, routes } from '@redwoodjs/router' + +// QUERY, Loading, Empty and Failure definitions... 
+ +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { + return ( + <> + {articles.map((article) => ( + <article key={article.id}> + <header> + <h2> + // highlight-next-line + <Link to={routes.article()}>{article.title}</Link> + </h2> + </header> + <p>{article.body}</p> + <div>Posted at: {article.createdAt}</div> + </article> + ))} + </> + ) +} +``` + +</TabItem> +</Tabs> + +If you click the link on the title of the blog post you should see the boilerplate text on `ArticlePage`: + +![Article page](https://user-images.githubusercontent.com/300/146100107-895a37af-7549-46fe-8802-2628fe6b49ed.png) + +But what we really need is to specify _which_ post we want to view on this page. It would be nice to be able to specify the ID of the post in the URL with something like `/article/1`. Let's tell the `<Route>` to expect another part of the URL, and when it does, give that part a name that we can reference later: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +<Route path="/article/{id}" page={ArticlePage} name="article" /> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/Routes.tsx" +<Route path="/article/{id}" page={ArticlePage} name="article" /> +``` + +</TabItem> +</Tabs> + +Notice the `{id}`. Redwood calls these _route parameters_. They say "whatever value is in this position in the path, let me reference it by the name inside the curly braces". And while we're in the routes file, lets move the route inside the `Set` with the `BlogLayout`. + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +import { Router, Route, Set } from '@redwoodjs/router' +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +const Routes = () => { + return ( + <Router> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/posts" page={PostPostsPage} name="posts" /> + </Set> + <Set wrap={BlogLayout}> + // highlight-next-line + <Route path="/article/{id}" page={ArticlePage} name="article" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/Routes.tsx" +import { Router, Route, Set } from '@redwoodjs/router' +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +const Routes = () => { + return ( + <Router> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/posts" page={PostPostsPage} name="posts" /> + </Set> + <Set wrap={BlogLayout}> + // highlight-next-line + <Route path="/article/{id}" page={ArticlePage} name="article" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound 
page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +</Tabs> + +Cool, cool, cool. Now we need to construct a link that has the ID of a post in it: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +<h2> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> +</h2> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +<h2> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> +</h2> +``` + +</TabItem> +</Tabs> + +<ShowForTs> + +:::info Wait... why am I getting a TypeScript error? + +When you have your dev server running, the Redwood CLI will watch your project and generate types. You can regenerate these types manually too, by running `yarn rw g types`. + +In this case, the path `/article/{id}` doesn't specify the type of `id` - so it defaults to `string` - where as our article id is actually a `number`. We'll tackle this in the next few sections - so you can ignore the red squiggle for now, and power through! +::: + +</ShowForTs> + + +For routes with route parameters, the named route function expects an object where you specify a value for each parameter. If you click on the link now, it will indeed take you to `/article/1` (or `/article/2`, etc, depending on the ID of the post). + +You may have noticed that when trying to view the new single-article page that you're getting an error. This is because the boilerplate code included with the page when it was generated includes a link to the page itself—a link which now requires an `id`. Remove the link and your page should be working again: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```diff title="web/src/pages/ArticlePage.js" +- import { Link, routes } from '@redwoodjs/router' + import { MetaTags } from '@redwoodjs/web' + + const ArticlePage = () => { + return ( + <> + <MetaTags title="Article" description="Article page" /> + + <h1>ArticlePage</h1> + <p> + Find me in <code>./web/src/pages/ArticlePage/ArticlePage.js</code> + </p> + <p> + My default route is named <code>article</code>, link to me with ` +- <Link to={routes.article()}>Article</Link>` + </p> + </> + ) + } + + export default ArticlePage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```diff title="web/src/pages/ArticlePage.tsx" +- import { Link, routes } from '@redwoodjs/router' + import { MetaTags } from '@redwoodjs/web' + + const ArticlePage = () => { + return ( + <> + <MetaTags title="Article" description="Article page" /> + + <h1>ArticlePage</h1> + <p> + Find me in <code>./web/src/pages/ArticlePage/ArticlePage.tsx</code> + </p> + <p> + My default route is named <code>article</code>, link to me with ` +- <Link to={routes.article()}>Article</Link>` + </p> + </> + ) + } + + export default ArticlePage +``` + +</TabItem> +</Tabs> + +### Using the Param + +Ok, so the ID is in the URL. What do we need next in order to display a specific post? It sounds like we'll be doing some data retrieval from the database, which means we want a cell. 
Note the singular `Article` here since we're only displaying one: + +```bash +yarn rw g cell Article +``` + +And then we'll use that cell in `ArticlePage`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ArticlePage/ArticlePage.jsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import ArticleCell from 'src/components/ArticleCell' + +const ArticlePage = () => { + return ( + <> + <MetaTags title="Article" description="Article page" /> + + // highlight-next-line + <ArticleCell /> + </> + ) +} + +export default ArticlePage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/ArticlePage/ArticlePage.tsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import ArticleCell from 'src/components/ArticleCell' + +const ArticlePage = () => { + return ( + <> + <MetaTags title="Article" description="Article page" /> + + // highlight-next-line + <ArticleCell /> + </> + ) +} + +export default ArticlePage +``` + +</TabItem> +</Tabs> + +Now over to the cell, we need access to that `{id}` route param so we can look up the ID of the post in the database. Let's alias the real query name `post` to `article` and retrieve some more fields: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticleCell/ArticleCell.jsx" +export const QUERY = gql` + query FindArticleQuery($id: Int!) { + // highlight-next-line + article: post(id: $id) { + id + // highlight-start + title + body + createdAt + // highlight-end + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ article }) => { + return <div>{JSON.stringify(article)}</div> +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/ArticleCell/ArticleCell.tsx" +import type { FindArticleQuery, FindArticleQueryVariables } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query FindArticleQuery($id: Int!) { + // highlight-next-line + article: post(id: $id) { + id + // highlight-start + title + body + createdAt + // highlight-end + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ article }: CellSuccessProps<FindArticleQuery, FindArticleQueryVariables>) => { + return <div>{JSON.stringify(article)}</div> +} +``` + +</TabItem> +</Tabs> + +Okay, we're getting closer. Still, where will that `$id` come from? Redwood has another trick up its sleeve. Whenever you put a route param in a route, that param is automatically made available to the page that route renders. 
Which means we can update `ArticlePage` to look like this: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ArticlePage/ArticlePage.jsx" +import { MetaTags } from '@redwoodjs/web' +import ArticleCell from 'src/components/ArticleCell' + +// highlight-next-line +const ArticlePage = ({ id }) => { + return ( + <> + <MetaTags title="Article" description="Article page" /> + + // highlight-next-line + <ArticleCell id={id} /> + </> + ) +} + +export default ArticlePage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/ArticlePage/ArticlePage.tsx" +import { MetaTags } from '@redwoodjs/web' +import ArticleCell from 'src/components/ArticleCell' + +// highlight-start +interface Props { + id: number +} +// highlight-end + +// highlight-next-line +const ArticlePage = ({ id }: Props) => { + return ( + <> + <MetaTags title="Article" description="Article page" /> + + // highlight-next-line + <ArticleCell id={id} /> + </> + ) +} + +export default ArticlePage +``` + +</TabItem> +</Tabs> + +`id` already exists since we named our route param `{id}`. Thanks Redwood! But how does that `id` end up as the `$id` GraphQL parameter? If you've learned anything about Redwood by now, you should know it's going to take care of that for you. By default, any props you give to a cell will automatically be turned into variables and given to the query. "No way," you're saying. Way. + +We can prove it! Try going to the detail page for a post in the browser and—uh oh. Hmm: + +![Article error message](https://user-images.githubusercontent.com/300/146100555-cea8806a-70aa-43e5-b2b4-d49d84014c4e.png) + +:::tip + +This error message you're seeing is thanks to the `Failure` section of our Cell! + +::: + +``` +Error: Variable "$id" got invalid value "1"; Int cannot represent non-integer value: "1" +``` + +It turns out that route params are extracted as strings from the URL, but GraphQL wants an integer for the `id`. We could use `parseInt()` to convert it to a number before passing it into `ArticleCell`, but we can do better than that. + +### Route Param Types + +What if you could request the conversion right in the route's path? Introducing **route param types**. It's as easy as adding `:Int` to our existing route param: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +<Route path="/article/{id:Int}" page={ArticlePage} name="article" /> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/Routes.tsx" +<Route path="/article/{id:Int}" page={ArticlePage} name="article" /> +``` + +</TabItem> +</Tabs> + +Voilà! Not only will this convert the `id` param to a number before passing it to your Page, it will prevent the route from matching unless the `id` path segment consists entirely of digits. If any non-digits are found, the router will keep trying other routes, eventually showing the `NotFoundPage` if no routes match. + +:::info What if I want to pass some other prop to the cell that I don't need in the query, but do need in the Success/Loader/etc. components? + +All of the props you give to the cell will be automatically available as props in the render components. Only the ones that match the GraphQL variables list will be given to the query. You get the best of both worlds! 
In our post display above, if you wanted to display some random number along with the post (for some contrived, tutorial-like reason), just pass that prop: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx +<ArticleCell id={id} rand={Math.random()} /> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx +<ArticleCell id={id} rand={Math.random()} /> +``` + +</TabItem> +</Tabs> + +And get it, along with the query result (and even the original `id` if you want) in the component: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript +export const Success = ({ article, id, rand }) => { + // ... +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx +interface Props extends CellSuccessProps<FindArticleQuery, FindArticleQueryVariables> { + id: number + rand: number +} + +export const Success = ({ article, id, rand }: Props) => { + // ... +} +``` + +</TabItem> +</Tabs> + +Thanks again, Redwood! + +::: + +### Displaying a Blog Post + +Now let's display the actual post instead of just dumping the query result. We could copy the display from the articles on the homepage, but that's not very reusable! This is the perfect place for a good old fashioned component—define the display once and then reuse the component on the homepage and the article display page. Both `ArticlesCell` and `ArticleCell` will display our new component. Let's Redwood-up a component (I just invented that phrase): + +```bash +yarn rw g component Article +``` + +Which creates `web/src/components/Article/Article.{jsx,tsx}` (and corresponding test and more!) as a super simple React component: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +const Article = () => { + return ( + <div> + <h2>{'Article'}</h2> + <p>{'Find me in ./web/src/components/Article/Article.jsx'}</p> + </div> + ) +} + +export default Article +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/Article/Article.tsx" +const Article = () => { + return ( + <div> + <h2>{'Article'}</h2> + <p>{'Find me in ./web/src/components/Article/Article.tsx'}</p> + </div> + ) +} + +export default Article +``` + +</TabItem> +</Tabs> + +:::info + +You may notice we don't have any explicit `import` statements for `React` itself. We (the Redwood dev team) got tired of constantly importing it over and over again in every file so we automatically import it for you! 
+ +::: + +Let's copy the `<article>` section from `ArticlesCell` and put it here instead, taking the `article` itself in as a prop: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +// highlight-next-line +import { Link, routes } from '@redwoodjs/router' + +// highlight-next-line +const Article = ({ article }) => { + return ( + // highlight-start + <article> + <header> + <h2> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div>{article.body}</div> + <div>Posted at: {article.createdAt}</div> + </article> + // highlight-end + ) +} + +export default Article +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/Article/Article.tsx" +// highlight-next-line +import { Link, routes } from '@redwoodjs/router' + +// highlight-next-line +import type { Post } from 'types/graphql' + +// highlight-start +interface Props { + article: Post +} +// highlight-end + +// highlight-next-line +const Article = ({ article }: Props) => { + return ( + // highlight-start + <article> + <header> + <h2> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div>{article.body}</div> + <div>Posted at: {article.createdAt}</div> + </article> + // highlight-end + ) +} + +export default Article +``` + +</TabItem> +</Tabs> + +And update `ArticlesCell` to use this new component instead: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +// highlight-next-line +import Article from 'src/components/Article' + +export const QUERY = gql` + query ArticlesQuery { + articles: posts { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ articles }) => { + return ( + <> + {articles.map((article) => ( + // highlight-next-line + <Article key={article.id} article={article} /> + ))} + </> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +// highlight-next-line +import Article from 'src/components/Article' + +import type { ArticlesQuery } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query ArticlesQuery { + articles: posts { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { + return ( + <> + {articles.map((article) => ( + // highlight-next-line + <Article key={article.id} article={article} /> + ))} + </> + ) +} +``` + +</TabItem> +</Tabs> + +Last but not least we can update the `ArticleCell` to properly display our blog posts as well: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticleCell/ArticleCell.jsx" +// highlight-next-line +import Article from 'src/components/Article' + +export const QUERY = gql` + query FindArticleQuery($id: Int!) 
{ + article: post(id: $id) { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ article }) => { + // highlight-next-line + return <Article article={article} /> +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/ArticleCell/ArticleCell.tsx" +// highlight-next-line +import Article from 'src/components/Article' + +import type { FindArticleQuery, FindArticleQueryVariables } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query FindArticleQuery($id: Int!) { + article: post(id: $id) { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ article }: CellSuccessProps<FindArticleQuery, FindArticleQueryVariables>) => { + // highlight-next-line + return <Article article={article} /> +} +``` + +</TabItem> +</Tabs> + +And there we go! We should be able to move back and forth between the homepage and the detail page. If you've only got one blog post then the homepage and single-article page will be identical! Head to the posts admin and create a couple more, won't you? + +![Article page showing an article](https://user-images.githubusercontent.com/300/146101296-f1d43812-45df-4f1e-a3da-4f6a085bfc08.png) + +:::info + +If you like what you've been seeing from the router, you can dive deeper into the [Redwood Router](../../router.md) guide. + +::: + +### Summary + +To recap: + +1. We created a new page to show a single post (the "detail" page). +2. We added a route to handle the `id` of the post and turn it into a route param, even coercing it into an integer. +3. We created a cell to fetch and display the post. +4. Redwood made the world a better place by making that `id` available to us at several key junctions in our code and even turning it into a number automatically. +5. We turned the actual post display into a standard React component and used it in both the homepage and new detail page. + diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter2/side-quest.md b/docs/versioned_docs/version-7.0/tutorial/chapter2/side-quest.md new file mode 100644 index 000000000000..6e0e31f28066 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter2/side-quest.md @@ -0,0 +1,194 @@ +# Side Quest: How Redwood Works with Data + +Redwood likes GraphQL. We think it's the API of the future. Our GraphQL implementation is built with [Apollo](https://www.apollographql.com/) (on the client) and [GraphQL Yoga & Envelop](https://www.graphql-yoga.com) (on the server). Remember in our file system layout, there was a directory `api/src/functions` and a single file in there, `graphql.{js,ts}`. If you were to deploy your app to a [serverless](https://en.wikipedia.org/wiki/Serverless_computing) stack (which we will do later in the [Deployment](../chapter4/deployment.md) section), that `graphql.{js,ts}` file would be compiled into a serverless function and would become the GraphQL API endpoint. 
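That entry point is ordinary code you can open up and read. Here's roughly what the generated `graphql.{js,ts}` file looks like — treat this as a sketch, since the exact contents in your project may differ slightly:

```js title="api/src/functions/graphql.js"
import { createGraphQLHandler } from '@redwoodjs/graphql-server'

import directives from 'src/directives/**/*.{js,ts}'
import sdls from 'src/graphql/**/*.sdl.{js,ts}'
import services from 'src/services/**/*.{js,ts}'

import { db } from 'src/lib/db'
import { logger } from 'src/lib/logger'

// Every SDL file and every service is globbed in and handed to the handler,
// which is what lets Redwood map resolvers onto your schema for you (more below)
export const handler = createGraphQLHandler({
  loggerConfig: { logger, options: {} },
  directives,
  sdls,
  services,
  onException: () => {
    // Disconnect from the database on error to avoid dangling connections
    db.$disconnect()
  },
})
```
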
Here's how a typical GraphQL query works its way through your app: + +![Redwood Data Flow](https://user-images.githubusercontent.com/300/75402679-50bdd180-58ba-11ea-92c9-bb5a5f4da659.png) + +The front-end uses [Apollo Client](https://www.apollographql.com/docs/react/) to create a GraphQL payload sent to [GraphQL Yoga](https://www.graphql-yoga.com) and [Envelop](https://www.envelop.dev/docs), which that `graphql.{js,ts}` file acts as the entry-point to. + +The `*.sdl.{js,ts}` files in `api/src/graphql` define the GraphQL [Object](https://www.apollographql.com/docs/tutorial/schema/#object-types), [Query](https://www.apollographql.com/docs/tutorial/schema/#the-query-type) and [Mutation](https://www.apollographql.com/docs/tutorial/schema/#the-mutation-type) types and thus the interface of your API. + +Normally you would write a [resolver map](https://www.graphql-tools.com/docs/resolvers) that contains all your resolvers and explains to your GraphQL server how to map them to your SDL. But putting business logic directly in the resolver map would result in a very big file and horrible reusability, so you'd be well advised to extract all the logic out into a library of functions, import them, and call them from the resolver map, remembering to pass all the arguments through. Ugh, that's a lot of effort and boilerplate, and still doesn't result in very good reusability. + +Redwood has a better way! Remember the `api/src/services` directory? Redwood will automatically import and map resolvers from the corresponding **services** file onto your SDL. At the same time, it allows you to write those resolvers in a way that makes them easy to call as regular functions from other resolvers or services. That's a lot of awesomeness to contemplate, so let's show an example. + +Consider the following SDL JavaScript snippet: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/posts.sdl.js" +export const schema = gql` + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + } + + type Query { + posts: [Post!]! + post(id: Int!): Post! + } + + input CreatePostInput { + title: String! + body: String! + } + + input UpdatePostInput { + title: String + body: String + } + + type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth + deletePost(id: Int!): Post! @requireAuth + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/posts.sdl.ts" +export const schema = gql` + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + } + + type Query { + posts: [Post!]! + post(id: Int!): Post! + } + + input CreatePostInput { + title: String! + body: String! + } + + input UpdatePostInput { + title: String + body: String + } + + type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth + deletePost(id: Int!): Post! @requireAuth + } +` +``` + +</TabItem> +</Tabs> + +In this example, Redwood will look in `api/src/services/posts/posts.{js,ts}` for the following five resolvers: + +- `posts()` +- `post({ id })` +- `createPost({ input })` +- `updatePost({ id, input })` +- `deletePost({ id })` + +To implement these, simply export them from the services file. 
They will usually get your data from a database, but they can do anything you want, as long as they return the proper types that GraphQL Yoga expects based on what you defined in `posts.sdl.{js,ts}`. + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/posts/posts.js" +import { db } from 'src/lib/db' + +export const posts = () => { + return db.post.findMany() +} + +export const post = ({ id }) => { + return db.post.findUnique({ + where: { id }, + }) +} + +export const createPost = ({ input }) => { + return db.post.create({ + data: input, + }) +} + +export const updatePost = ({ id, input }) => { + return db.post.update({ + data: input, + where: { id }, + }) +} + +export const deletePost = ({ id }) => { + return db.post.delete({ + where: { id }, + }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript title="api/src/services/posts/posts.ts" +import { db } from 'src/lib/db' +import type { QueryResolvers, MutationResolvers } from 'types/graphql' + +export const posts: QueryResolvers['posts'] = () => { + return db.post.findMany() +} + +export const post: QueryResolvers['post'] = ({ id }) => { + return db.post.findUnique({ + where: { id }, + }) +} + +export const createPost: MutationResolvers['createPost'] = ({ input }) => { + return db.post.create({ + data: input, + }) +} + +export const updatePost: MutationResolvers['updatePost'] = ({ id, input }) => { + return db.post.update({ + data: input, + where: { id }, + }) +} + +export const deletePost: MutationResolvers['deletePost'] = ({ id }) => { + return db.post.delete({ + where: { id }, + }) +} +``` + +</TabItem> +</Tabs> + +:::info + +Yoga/Envelop assumes these functions return promises, which `db` (an instance of `PrismaClient`) does. Yoga/Envelop waits for them to resolve before responding with your query results, so you don't need to worry about `async`/`await` or mess with callbacks yourself. + +::: + +You may be wondering why we call these implementation files "services". While this example blog doesn't get complex enough to show it off, services are intended to be an abstraction **above** single database tables. For example, a more complex app may have a "billing" service that uses both a `transactions` table and a `subscriptions` table. Some of the functionality of this service may be exposed via GraphQL, but only as much as you like. + +You don't have to make each function in your service available via GraphQL—leave it out of your `Query` and `Mutation` types and it won't exist as far as GraphQL is concerned. But you could still use it yourself—services are just JavaScript functions so you can use them anywhere you'd like: + +- From another service +- In a custom lambda function +- From a completely separate, custom API + +By dividing your app into well-defined services and providing an API for those services (both for internal use **and** for GraphQL), you will naturally start to enforce separation of concerns and increases the maintainability of your codebase. + +Back to our data flow: Yoga/Envelop has called the resolver which, in our case, retrieved data from the database. Yoga/Envelop digs into the object and returns only the key/values that were asked for in the GraphQL query. It then packages up the response in a GraphQL payload and returns it to the browser. + +If you're using a Redwood **cell** then this data will be available to you in your `Success` component ready to be looped through and/or displayed like any other React component. 
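Earlier we said that services are just JavaScript functions you can call from anywhere on the api side. To make that concrete, here's a hedged sketch: a purely hypothetical `digests` service (not part of the tutorial app) that reuses the `posts()` service directly, with no GraphQL involved at all:

```js title="api/src/services/digests/digests.js"
// Hypothetical example: a "digests" service that isn't part of the tutorial app.
// It imports and calls the posts() service like any other function
import { posts } from 'src/services/posts/posts'

export const weeklyDigest = async () => {
  const allPosts = await posts()

  // Do whatever non-GraphQL work you like with the result
  return {
    count: allPosts.length,
    titles: allPosts.map((post) => post.title),
  }
}
```

Because `weeklyDigest` doesn't appear in any SDL's `Query` or `Mutation` type, it never becomes part of the GraphQL API — it's just another function you can call from a service, a custom lambda, or anywhere else you need it.
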
diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter3/forms.md b/docs/versioned_docs/version-7.0/tutorial/chapter3/forms.md new file mode 100644 index 000000000000..f6e09f2bd305 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter3/forms.md @@ -0,0 +1,1321 @@ +# Building a Form + +<div class="video-container"> + <iframe src="https://www.youtube.com/embed/b0x8an_UZ98?rel=0" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture; modestbranding; showinfo=0; fullscreen"></iframe> +</div> + +Wait, don't close your browser! You had to know this was coming eventually, didn't you? And you've probably realized by now we wouldn't even have this section in the tutorial unless Redwood had figured out a way to make forms less soul-sucking than usual. In fact, Redwood might even make you _love_ building forms. + +Well, love is a strong word. _Like_ building forms? + +_Tolerate_ building them? + +We already have a form or two in our app; remember our posts scaffold? And those work pretty well! How hard can it be? (Hopefully you haven't sneaked a peek at that code—what's coming next will be much more impressive if you haven't.) + +Let's build the simplest form that still makes sense for our blog, a "Contact Us" form. + +### The Page + +```bash +yarn rw g page contact +``` + +We can put a link to Contact in our layout's header: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.jsx" +import { Link, routes } from '@redwoodjs/router' + +const BlogLayout = ({ children }) => { + return ( + <> + <header> + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + <nav> + <ul> + <li> + <Link to={routes.home()}>Home</Link> + </li> + <li> + <Link to={routes.about()}>About</Link> + </li> + // highlight-start + <li> + <Link to={routes.contact()}>Contact</Link> + </li> + // highlight-end + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/layouts/BlogLayout/BlogLayout.tsx" +import { Link, routes } from '@redwoodjs/router' + +type BlogLayoutProps = { + children?: React.ReactNode +} + +const BlogLayout = ({ children }: BlogLayoutProps) => { + return ( + <> + <header> + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + <nav> + <ul> + <li> + <Link to={routes.home()}>Home</Link> + </li> + <li> + <Link to={routes.about()}>About</Link> + </li> + // highlight-start + <li> + <Link to={routes.contact()}>Contact</Link> + </li> + // highlight-end + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +</Tabs> + +And then use the `BlogLayout` for the `ContactPage` by making sure its wrapped by the same `<Set>` as the other pages in the routes file: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +import { Router, Route, Set } from '@redwoodjs/router' +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +const Routes = () => { + return ( + <Router> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/posts" 
page={PostPostsPage} name="posts" /> + </Set> + <Set wrap={BlogLayout}> + <Route path="/article/{id:Int}" page={ArticlePage} name="article" /> + // highlight-next-line + <Route path="/contact" page={ContactPage} name="contact" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/Routes.tsx" +import { Router, Route, Set } from '@redwoodjs/router' +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +const Routes = () => { + return ( + <Router> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/posts" page={PostPostsPage} name="posts" /> + </Set> + <Set wrap={BlogLayout}> + <Route path="/article/{id:Int}" page={ArticlePage} name="article" /> + // highlight-next-line + <Route path="/contact" page={ContactPage} name="contact" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +</Tabs> + +Double check that everything looks good and then let's get to the good stuff. + +### Introducing Form Helpers + +Forms in React are infamously annoying to work with. There are [Controlled Components](https://reactjs.org/docs/forms.html#controlled-components) and [Uncontrolled Components](https://reactjs.org/docs/uncontrolled-components.html) and [third party libraries](https://jaredpalmer.com/formik/) and many more workarounds to try and make forms in React as simple as they were originally intended to be in the HTML spec: an `<input>` field with a `name` attribute that gets submitted somewhere when you click a button. + +We think Redwood is a step or two in the right direction by not only freeing you from writing controlled component plumbing, but also dealing with validation and errors automatically. Let's see how it works. + +We won't be pulling any data from the database on our Contact page so we won't create a cell. Let's create the form right in the page. Redwood forms start with the...wait for it...`<Form>` tag: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form } from '@redwoodjs/forms' + +const ContactPage = () => { + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + // highlight-next-line + <Form></Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form } from '@redwoodjs/forms' + +const ContactPage = () => { + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + // highlight-next-line + <Form></Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +Well that was anticlimactic. You can't even see it in the browser. 
Let's add a form field so we can at least see something. Redwood ships with several inputs and a plain text input box is the `<TextField>`. We'll also give the field a `name` attribute so that once there are multiple inputs on this page we'll know which contains which data: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form, TextField } from '@redwoodjs/forms' + +const ContactPage = () => { + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form> + // highlight-next-line + <TextField name="input" /> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form, TextField } from '@redwoodjs/forms' + +const ContactPage = () => { + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form> + // highlight-next-line + <TextField name="input" /> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146102866-a1adaad2-b0b3-4bd8-b42d-4ed918bd3c82.png" /> + +Something is showing! Still, pretty boring. How about adding a submit button? + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form, TextField, Submit } from '@redwoodjs/forms' + +const ContactPage = () => { + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form> + <TextField name="input" /> + // highlight-next-line + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form, TextField, Submit } from '@redwoodjs/forms' + +const ContactPage = () => { + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form> + <TextField name="input" /> + // highlight-next-line + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146102817-e2f6c020-ef64-45bb-bdbb-48a484218678.png" /> + +We have what might actually be considered a real, bonafide form here. Try typing something in and clicking "Save". Nothing blew up on the page but we have no indication that the form submitted or what happened to the data. Next we'll get the data from our fields. + +### onSubmit + +Similar to a plain HTML form we'll give `<Form>` an `onSubmit` handler. 
That handler will be called with a single argument—an object containing all of the submitted form fields: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +import { Form, TextField, Submit } from '@redwoodjs/forms' + +const ContactPage = () => { + // highlight-start + const onSubmit = (data) => { + console.log(data) + } + // highlight-end + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + // highlight-next-line + <Form onSubmit={onSubmit}> + <TextField name="input" /> + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form, TextField, Submit, SubmitHandler } from '@redwoodjs/forms' + +// highlight-start +interface FormValues { + input: string +} +// highlight-end + +const ContactPage = () => { + // highlight-start + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + // highlight-end + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + // highlight-next-line + <Form onSubmit={onSubmit}> + <TextField name="input" /> + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +Now try filling in some data and submitting, then checking out the console in Web Inspector: + +<img src="https://user-images.githubusercontent.com/300/146102943-dd0155e5-3bcb-45c5-b27f-65bfacb65c91.png" /> + +Great! Let's turn this into a more useful form by adding a couple fields. We'll rename the existing one to "name" and add "email" and "message": + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-next-line +import { Form, TextField, TextAreaField, Submit } from '@redwoodjs/forms' + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + // highlight-start + <TextField name="name" /> + <TextField name="email" /> + <TextAreaField name="message" /> + // highlight-end + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +// highlight-start +import { + Form, + TextField, + TextAreaField, + Submit, + SubmitHandler +} from '@redwoodjs/forms' +// highlight-end + +interface FormValues { + // highlight-start + name: string + email: string + message: string + // highlight-end +} + +const ContactPage = () => { + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + // highlight-start + <TextField name="name" /> + <TextField name="email" /> + <TextAreaField name="message" /> + // highlight-end + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +See the new `<TextAreaField>` component here which generates an HTML `<textarea>` but that contains Redwood's form goodness: + +<img 
src="https://user-images.githubusercontent.com/300/146103219-c8dc958d-ea2b-4bea-8cb8-62dcd0be6783.png" /> + +Let's add some labels: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +import { Form, TextField, TextAreaField, Submit } from '@redwoodjs/forms' + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + // highlight-next-line + <label htmlFor="name">Name</label> + <TextField name="name" /> + + // highlight-next-line + <label htmlFor="email">Email</label> + <TextField name="email" /> + + // highlight-next-line + <label htmlFor="message">Message</label> + <TextAreaField name="message" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +import { + Form, + TextField, + TextAreaField, + Submit, + SubmitHandler +} from '@redwoodjs/forms' + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + // highlight-next-line + <label htmlFor="name">Name</label> + <TextField name="name" /> + + // highlight-next-line + <label htmlFor="email">Email</label> + <TextField name="email" /> + + // highlight-next-line + <label htmlFor="message">Message</label> + <TextAreaField name="message" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146103401-b3d84a6c-091c-4ebc-a28c-f82c57561057.png" /> + +Try filling out the form and submitting and you should get a console message with all three fields now. + +### Validation + +"Okay, Redwood tutorial author," you're saying, "what's the big deal? You built up Redwood's form helpers as The Next Big Thing but there are plenty of libraries that will let me skip creating controlled inputs manually. So what?" And you're right! Anyone can fill out a form _correctly_ (although there are plenty of QA folks who would challenge that statement), but what happens when someone leaves something out, or makes a mistake, or tries to haxorz our form? Now who's going to be there to help? Redwood, that's who! + +All three of these fields should be required in order for someone to send a message to us. 
Let's enforce that with the standard HTML `required` attribute: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +return ( + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + // highlight-next-line + <TextField name="name" required /> + + <label htmlFor="email">Email</label> + // highlight-next-line + <TextField name="email" required /> + + <label htmlFor="message">Message</label> + // highlight-next-line + <TextAreaField name="message" required /> + + <Submit>Save</Submit> + </Form> +) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +return ( + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + // highlight-next-line + <TextField name="name" required /> + + <label htmlFor="email">Email</label> + // highlight-next-line + <TextField name="email" required /> + + <label htmlFor="message">Message</label> + // highlight-next-line + <TextAreaField name="message" required /> + + <Submit>Save</Submit> + </Form> +) +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146103473-ad762364-c456-49ae-8de7-3b26b10b38ff.png" /> + +Now when trying to submit there'll be message from the browser noting that a field must be filled in. This is better than nothing, but these messages can't be styled. Can we do better? + +Yes! Let's update that `required` call to instead be an object we pass to a custom attribute on Redwood form helpers called `validation`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +return ( + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + // highlight-next-line + <TextField name="name" validation={{ required: true }} /> + + <label htmlFor="email">Email</label> + // highlight-next-line + <TextField name="email" validation={{ required: true }} /> + + <label htmlFor="message">Message</label> + // highlight-next-line + <TextAreaField name="message" validation={{ required: true }} /> + + <Submit>Save</Submit> + </Form> +) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +return ( + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + // highlight-next-line + <TextField name="name" validation={{ required: true }} /> + + <label htmlFor="email">Email</label> + // highlight-next-line + <TextField name="email" validation={{ required: true }} /> + + <label htmlFor="message">Message</label> + // highlight-next-line + <TextAreaField name="message" validation={{ required: true }} /> + + <Submit>Save</Submit> + </Form> +) +``` + +</TabItem> +</Tabs> + +And now when we submit the form with blank fields...the Name field gets focus. Boring. But this is just a stepping stone to our amazing reveal! We have one more form helper component to add—the one that displays errors on a field. Oh, it just so happens that it's plain HTML so we can style it however we want! 
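Before we get to that, one quick aside: the `validation` object isn't limited to `required`. Redwood hands these rules to the underlying form library, so — as an assumption-laden example rather than something this form needs — a minimum message length should also work:

```jsx
<TextAreaField
  name="message"
  validation={{ required: true, minLength: 10 }}
/>
```

For our contact form, `required` is all we need — now, back to displaying those errors.
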
+ +### `<FieldError>` + +Introducing `<FieldError>` (don't forget to include it in the `import` statement at the top): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +import { + // highlight-next-line + FieldError, + Form, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + <TextField name="name" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="name" /> + + <label htmlFor="email">Email</label> + <TextField name="email" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="email" /> + + <label htmlFor="message">Message</label> + <TextAreaField name="message" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="message" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +import { + // highlight-next-line + FieldError, + Form, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + <TextField name="name" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="name" /> + + <label htmlFor="email">Email</label> + <TextField name="email" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="email" /> + + <label htmlFor="message">Message</label> + <TextAreaField name="message" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="message" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +Note that the `name` attribute matches the `name` of the input field above it. That's so it knows which field to display errors for. Try submitting that form now. + +<img src="https://user-images.githubusercontent.com/300/146103580-1ebff2bb-d51d-4087-95de-3230b304e65e.png" /> + +But this is just the beginning. Let's make sure folks realize this is an error message. Remember the basic styles we added to `index.css` back at the start? There's an `.error` class in there that we can use. 
Set the `className` attribute on `<FieldError>`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + <TextField name="name" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="name" className="error" /> + + <label htmlFor="email">Email</label> + <TextField name="email" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="email" className="error" /> + + <label htmlFor="message">Message</label> + <TextAreaField name="message" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + TextField, + TextAreaField, + Submit, + SubmitHandler +} from '@redwoodjs/forms' + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + <TextField name="name" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="name" className="error" /> + + <label htmlFor="email">Email</label> + <TextField name="email" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="email" className="error" /> + + <label htmlFor="message">Message</label> + <TextAreaField name="message" validation={{ required: true }} /> + // highlight-next-line + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146104378-1066882c-1fe7-49e1-9547-44437338155d.png" /> + +You know what would be nice? If the input itself somehow displayed the fact that there was an error. 
Check out the `errorClassName` attributes on the inputs: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + <TextField + name="name" + validation={{ required: true }} + // highlight-next-line + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <label htmlFor="email">Email</label> + <TextField + name="email" + validation={{ required: true }} + // highlight-next-line + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <label htmlFor="message">Message</label> + <TextAreaField + name="message" + validation={{ required: true }} + // highlight-next-line + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + <label htmlFor="name">Name</label> + <TextField + name="name" + validation={{ required: true }} + // highlight-next-line + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <label htmlFor="email">Email</label> + <TextField + name="email" + validation={{ required: true }} + // highlight-next-line + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <label htmlFor="message">Message</label> + <TextAreaField + name="message" + validation={{ required: true }} + // highlight-next-line + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146104498-8b24ef5c-66e7-48a2-b4ad-0432fff181dd.png" /> + +Oooo, what if the _label_ could change as well? It can, but we'll need Redwood's custom `<Label>` component for that. Note that the `htmlFor` attribute of `<label>` becomes the `name` prop on `<Label>`, just like with the other Redwood form components. 
And don't forget the import: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + // highlight-next-line + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + // highlight-start + <Label name="name" errorClassName="error"> + Name + </Label> + // highlight-end + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + // highlight-start + <Label name="email" errorClassName="error"> + Email + </Label> + // highlight-end + <TextField + name="email" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + // highlight-start + <Label name="message" errorClassName="error"> + Message + </Label> + // highlight-end + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + // highlight-next-line + Label, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit}> + // highlight-start + <Label name="name" errorClassName="error"> + Name + </Label> + // highlight-end + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + // highlight-start + <Label name="email" errorClassName="error"> + Email + </Label> + // highlight-end + <TextField + name="email" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + // highlight-start + <Label name="message" errorClassName="error"> + Message + </Label> + // highlight-end + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146104647-25f1b2cf-a3cd-4737-aa2d-9aa984c08e39.png" /> + +:::info Error styling + +In addition to `className` and `errorClassName` you can also use `style` and `errorStyle`. Check out the [Form docs](../../forms.md) for more details on error styling. + +::: + +And notice that if you fill in something in a field that's marked as an error, the error instantly goes away! This is great feedback for our users that they're doing what we want, and they don't have to wait to click the "Save" button again just to see if what they changed is now correct. + +### Validating Input Format + +We should make sure the email field actually contains an email, by providing a `pattern`. 
+This is definitely not the end-all-be-all for email address validation, but for now let us pretend it's bulletproof. +Let's also change the message on the email validation to be a little more friendly: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +<TextField + name="email" + validation={{ + required: true, + // highlight-start + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + // highlight-end + }} + errorClassName="error" +/> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +<TextField + name="email" + validation={{ + required: true, + // highlight-start + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + // highlight-end + }} + errorClassName="error" +/> +``` + +</TabItem> +</Tabs> + +<img src="https://user-images.githubusercontent.com/300/146105001-96b76f12-e011-46c3-a490-7dd51b872498.png" /> + +:::info + +When a validation error appears it will _disappear_ as soon as you fix the content of the field. You don't have to click "Submit" again to remove the error messages. This is great feedback for users (and eagle-eyed QA testers) since they receive instant feedback what they changed is now correct. + +::: + +Finally, you know what would _really_ be nice? If the fields were validated as soon as the user leaves each one so they don't fill out the whole thing and submit just to see multiple errors appear. Let's do that: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +<Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +<Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> +``` + +</TabItem> +</Tabs> + +Well, what do you think? Was it worth the hype? A couple of new components and you've got forms that handle validation and wrap up submitted values in a nice data object, all for free. + +:::info + +Redwood's forms are built on top of [React Hook Form](https://react-hook-form.com/) so there is even more functionality available than we've documented here. Visit the [Form docs](../../forms.md) to learn more about all form functionalities. + +::: + +Redwood has one more trick up its sleeve when it comes to forms but we'll save that for when we're actually submitting one to the server. + +Having a contact form is great, but only if you actually get the contact somehow. Let's create a database table to hold the submitted data and create our first GraphQL mutation. diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter3/saving-data.md b/docs/versioned_docs/version-7.0/tutorial/chapter3/saving-data.md new file mode 100644 index 000000000000..7b4732c5c14a --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter3/saving-data.md @@ -0,0 +1,2047 @@ +# Saving Data + +### Add a Contact Model + +Let's add a new database table. 
Open up `api/db/schema.prisma` and add a Contact model after the Post model that's there now: + +```js title="api/db/schema.prisma" +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + binaryTargets = "native" +} + +model Post { + id Int @id @default(autoincrement()) + title String + body String + createdAt DateTime @default(now()) +} + +// highlight-start +model Contact { + id Int @id @default(autoincrement()) + name String + email String + message String + createdAt DateTime @default(now()) +} +// highlight-end +``` + +:::tip + +To mark a field as optional (that is, allowing `NULL` as a value) you can suffix the datatype with a question mark, e.g. `name String?`. This will allow `name`'s value to be either a `String` or `NULL`. + +::: + +Next we create and apply a migration: + +```bash +yarn rw prisma migrate dev +``` + +We can name this one something like "create contact". + +### Create an SDL & Service + +Now we'll create the GraphQL interface to access this table. We haven't used this `generate` command yet (although the `scaffold` command did use it behind the scenes): + +```bash +yarn rw g sdl Contact +``` + +Just like the `scaffold` command, this will create a few new files under the `api` directory: + +1. `api/src/graphql/contacts.sdl.{js,ts}`: defines the GraphQL schema in GraphQL's schema definition language +2. `api/src/services/contacts/contacts.{js,ts}`: contains your app's business logic (also creates associated test files) + +If you remember our discussion in [how Redwood works with data](../chapter2/side-quest.md) you'll recall that queries and mutations in an SDL file are automatically mapped to resolvers defined in a service, so when you generate an SDL file you'll get a service file as well, since one requires the other. + +Open up `api/src/graphql/contacts.sdl.{js,ts}` and you'll see the same Query and Mutation types defined for Contact that were created for the Post scaffold. `Contact`, `CreateContactInput` and `UpdateContactInput` types, as well as a `Query` type with `contacts` and `contact`, and a `Mutation` type with `createContact`, `updateContact` and `deleteContact`. + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/contacts.sdl.js" +export const schema = gql` + type Contact { + id: Int! + name: String! + email: String! + message: String! + createdAt: DateTime! + } + + type Query { + contacts: [Contact!]! @requireAuth + contact(id: Int!): Contact @requireAuth + } + + input CreateContactInput { + name: String! + email: String! + message: String! + } + + input UpdateContactInput { + name: String + email: String + message: String + } + + type Mutation { + createContact(input: CreateContactInput!): Contact! @requireAuth + updateContact(id: Int!, input: UpdateContactInput!): Contact! @requireAuth + deleteContact(id: Int!): Contact! @requireAuth + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/contacts.sdl.ts" +export const schema = gql` + type Contact { + id: Int! + name: String! + email: String! + message: String! + createdAt: DateTime! + } + + type Query { + contacts: [Contact!]! @requireAuth + contact(id: Int!): Contact @requireAuth + } + + input CreateContactInput { + name: String! + email: String! + message: String! + } + + input UpdateContactInput { + name: String + email: String + message: String + } + + type Mutation { + createContact(input: CreateContactInput!): Contact! 
@requireAuth + updateContact(id: Int!, input: UpdateContactInput!): Contact! @requireAuth + deleteContact(id: Int!): Contact! @requireAuth + } +` +``` + +</TabItem> +</Tabs> + +The `@requireAuth` string you see after the `Query` and `Mutation` types is a [schema directive](https://www.graphql-tools.com/docs/schema-directives) which says that in order to access this GraphQL query the user is required to be authenticated. We haven't added authentication yet, so this won't have any effect—anyone will be able to query it, logged in or not, because until you add authentication the function behind `@requireAuth` always returns `true`. + +What's `CreateContactInput` and `UpdateContactInput`? Redwood follows the GraphQL recommendation of using [Input Types](https://graphql.org/graphql-js/mutations-and-input-types/) in mutations rather than listing out each and every field that can be set. Any fields required in `schema.prisma` are also required in `CreateContactInput` (you can't create a valid record without them) but nothing is explicitly required in `UpdateContactInput`. This is because you could want to update only a single field, or two fields, or all fields. The alternative would be to create separate Input types for every permutation of fields you would want to update. We felt that only having one update input type was a good compromise for optimal developer experience. + +:::info + +Redwood assumes your code won't try to set a value on any field named `id` or `createdAt` so it left those out of the Input types, but if your database allowed either of those to be set manually you can update `CreateContactInput` or `UpdateContactInput` and add them. + +::: + +Since all of the DB columns were required in the `schema.prisma` file they are marked as required in the GraphQL Types with the `!` suffix on the datatype (e.g. `name: String!`). + +:::tip + +GraphQL's SDL syntax requires an extra `!` when a field _is_ required. Remember: `schema.prisma` syntax requires an extra `?` character when a field is _not_ required. + +::: + +As described in [Side Quest: How Redwood Deals with Data](../chapter2/side-quest.md), there are no explicit resolvers defined in the SDL file. Redwood follows a simple naming convention: each field listed in the `Query` and `Mutation` types in the `sdl` file (`api/src/graphql/contacts.sdl.{js,ts}`) maps to a function with the same name in the `services` file (`api/src/services/contacts/contacts.{js,ts}`). + +:::tip + +*Psssstttt* I'll let you in on a little secret: if you just need a simple read-only SDL, you can skip creating the create/update/delete mutations by passing a flag to the SDL generator like so: + +`yarn rw g sdl Contact --no-crud` + +You'd only get a single `contacts` type to return them all. + +::: + +We'll only need `createContact` for our contact page. It accepts a single variable, `input`, that is an object that conforms to what we expect for a `CreateContactInput`, namely `{ name, email, message }`. This mutation should be able to be accessed by anyone, so we'll need to change `@requireAuth` to `@skipAuth`. This one says that authentication is *not* required and will allow anyone to anonymously send us a message. Note that having at least one schema directive is required for each `Query` and `Mutation` or you'll get an error: Redwood embraces the idea of "secure by default" meaning that we try and keep your application safe, even if you do nothing special to prevent access. 
In this case it's much safer to throw an error than to accidentally expose all of your users' data to the internet! + +:::info + +Serendipitously, the default schema directive of `@requireAuth` is exactly what we want for the `contacts` query that returns ALL contacts—only we, the owners of the blog, should have access to read them all. + +::: + +We're not going to let anyone update or delete a message, so we can remove those fields completely. Here's what the SDL file looks like after the changes: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/contacts.sdl.js" +export const schema = gql` + type Contact { + id: Int! + name: String! + email: String! + message: String! + createdAt: DateTime! + } + + type Query { + contacts: [Contact!]! @requireAuth + contact(id: Int!): Contact @requireAuth + } + + input CreateContactInput { + name: String! + email: String! + message: String! + } + + // highlight-start + type Mutation { + createContact(input: CreateContactInput!): Contact! @skipAuth + } + // highlight-end +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/contacts.sdl.ts" +export const schema = gql` + type Contact { + id: Int! + name: String! + email: String! + message: String! + createdAt: DateTime! + } + + type Query { + contacts: [Contact!]! @requireAuth + contact(id: Int!): Contact @requireAuth + } + + input CreateContactInput { + name: String! + email: String! + message: String! + } + + // highlight-start + type Mutation { + createContact(input: CreateContactInput!): Contact! @skipAuth + } + // highlight-end +` +``` + +</TabItem> +</Tabs> + +That's it for the SDL file, let's take a look at the service: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```js title="api/src/services/contacts/contacts.js" +import { db } from 'src/lib/db' + +export const contacts = () => { + return db.contact.findMany() +} + +export const contact = ({ id }) => { + return db.contact.findUnique({ + where: { id }, + }) +} + +export const createContact = ({ input }) => { + return db.contact.create({ + data: input, + }) +} + +export const updateContact = ({ id, input }) => { + return db.contact.update({ + data: input, + where: { id }, + }) +} + +export const deleteContact = ({ id }) => { + return db.contact.delete({ + where: { id }, + }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```js title="api/src/services/contacts/contacts.ts" +import type { QueryResolvers, MutationResolvers } from 'types/graphql' + +import { db } from 'src/lib/db' + +export const contacts: QueryResolvers['contacts'] = () => { + return db.contact.findMany() +} + +export const contact: QueryResolvers['contact'] = ({ id }) => { + return db.contact.findUnique({ + where: { id }, + }) +} + +export const createContact: MutationResolvers['createContact'] = ({ input }) => { + return db.contact.create({ + data: input, + }) +} + +export const updateContact: MutationResolvers['updateContact'] = ({ id, input }) => { + return db.contact.update({ + data: input, + where: { id }, + }) +} + +export const deleteContact: MutationResolvers['deleteContact'] = ({ id }) => { + return db.contact.delete({ + where: { id }, + }) +} +``` + +</TabItem> +</Tabs> + +Pretty simple. You can see here how the `createContact()` function expects the `input` argument and just passes that on to Prisma in the `create()` call. 
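One nice side effect of this design is that a service is just a plain async function, so nothing stops you from calling it outside of GraphQL, for example from another service or from a test. A minimal sketch (the `sendTestMessage` wrapper and the sample values are purely illustrative):

```js
import { createContact } from 'src/services/contacts/contacts'

// a hypothetical helper that exercises the service directly,
// passing the same single `{ input }` argument the resolver receives
export const sendTestMessage = async () => {
  const contact = await createContact({
    input: {
      name: 'Rob',
      email: 'rob@redwoodjs.com',
      message: 'I love Redwood!',
    },
  })

  return contact.id // the id of the newly created Contact row
}
```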
+ +You can delete `updateContact` and `deleteContact` here if you want, but since there's no longer an accessible GraphQL field for them they can't be used by the client anyway. + +Before we plug this into the UI, let's take a look at a nifty GUI you get just by running `yarn redwood dev`. + +### GraphQL Playground + +Often it's nice to experiment and call your API in a more "raw" form before you get too far down the path of implementation only to find out something is missing. Is there a typo in the API layer or the web layer? Let's find out by accessing just the API layer. + +When you started development with `yarn redwood dev` (or `yarn rw dev`) you actually started a second process running at the same time. Open a new browser tab and head to [http://localhost:8911/graphql](http://localhost:8911/graphql) This is GraphQL Yoga's [GraphiQL](https://www.graphql-yoga.com/docs/features/graphiql), a web-based GUI for GraphQL APIs: + +<img width="1410" alt="image" src="https://user-images.githubusercontent.com/22184161/226866579-896e8edc-4ac0-48bd-80f0-2ba28da677b5.png" /> + +Not very exciting yet, but select the "Docs" tab on the top left and click on `query: Query`. + +<img width="1410" alt="image" src="https://user-images.githubusercontent.com/22184161/226866573-41697d10-a056-4e3a-add3-b940147de802.png" /> + +It's the complete schema as defined by our SDL files! The Playground will ingest these definitions and give you autocomplete hints on the left to help you build queries from scratch. Try getting the IDs of all the posts in the database; type the query at the left and then click the "Play" button to execute: + +<img width="1410" alt="image" src="https://user-images.githubusercontent.com/22184161/226866554-3daefe7f-7b4d-4503-aaa0-9895ee5bd38e.png" /> + +The GraphQL Playground is a great way to experiment with your API or troubleshoot when you come across a query or mutation that isn't behaving in the way you expect. + +### Creating a Contact + +Our GraphQL mutation is ready to go on the backend so all that's left is to invoke it on the frontend. Everything related to our form is in `ContactPage` so that's where we'll put the mutation call. First we define the mutation as a constant that we call later (this can be defined outside of the component itself, right after the `import` statements): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +// highlight-start +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` +// highlight-end + +const ContactPage = () => { + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags } from '@redwoodjs/web' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +// highlight-start +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) { + createContact(input: $input) { + id + } + } +` +// highlight-end + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +We reference the `createContact` mutation we defined in the Contacts SDL passing it an `input` object which will contain the actual name, email and message values. + +Next we'll call the `useMutation` hook provided by Redwood which will allow us to execute the mutation when we're ready (don't forget to `import` it): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +// highlight-next-line +import { MetaTags, useMutation } from '@redwoodjs/web' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` + +const ContactPage = () => { + // highlight-next-line + const [create] = useMutation(CREATE_CONTACT) + + const onSubmit = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +// highlight-next-line +import { MetaTags, useMutation } from '@redwoodjs/web' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +// highlight-start +import { + CreateContactMutation, + CreateContactMutationVariables, +} from 'types/graphql' +// highlight-end + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) { + createContact(input: $input) { + id + } + } +` + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + // highlight-start + const [create] = useMutation< + CreateContactMutation, + CreateContactMutationVariables + >(CREATE_CONTACT) + // highlight-end + + const onSubmit: SubmitHandler<FormValues> = (data) => { + console.log(data) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +<ShowForTs> + +:::tip Reminder about generated types + +Just a quick reminder that Redwood will automatically generate types for your GraphQL queries and mutations if you have the dev server running (or if you run `yarn rw generate types`). + +Once you define the `CreateContactMutation` (the GraphQL one), Redwood will generate the `CreateContactMutation` and `CreateContactMutationVariables` types from it for you. 
+ +Take a look at our [Generated Types](typescript/generated-types.md) docs for a deeper dive! + +::: + +</ShowForTs> + +`create` is a function that invokes the mutation and takes an object with a `variables` key, containing another object with an `input` key. As an example, we could call it like: + +```js +create({ + variables: { + input: { + name: 'Rob', + email: 'rob@redwoodjs.com', + message: 'I love Redwood!', + }, + }, +}) +``` + +If you'll recall `<Form>` gives us all of the fields in a nice object where the key is the name of the field, which means the `data` object we're receiving in `onSubmit` is already in the proper format that we need for the `input`! + +That means we can update the `onSubmit` function to invoke the mutation with the data it receives: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) { + createContact(input: $input) { + id + } + } +` + +const ContactPage = () => { + const [create] = useMutation(CREATE_CONTACT) + + const onSubmit = (data) => { + // highlight-next-line + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +import { + CreateContactMutation, + CreateContactMutationVariables, +} from 'types/graphql' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const [create] = useMutation< + CreateContactMutation, + CreateContactMutationVariables + >(CREATE_CONTACT) + + const onSubmit: SubmitHandler<FormValues> = (data) => { + // highlight-next-line + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +Try filling out the form and submitting—you should have a new Contact in the database! You can verify that with [Prisma Studio](/docs/tutorial/chapter2/getting-dynamic#prisma-studio) or [GraphQL Playground](#graphql-playground) if you were so inclined: + +<img width="1410" alt="image" src="https://user-images.githubusercontent.com/32992335/161488540-a7ad1a57-7432-4171-bd75-500eeaa17bcb.png" /> + +:::info Wait, I thought you said this was secure by default and someone couldn't view all contacts without being logged in? + +Remember: we haven't added authentication yet, so the concept of someone being logged in is meaningless right now. In order to prevent frustrating errors in a new application, the `@requireAuth` directive simply returns `true` until you setup an authentication system. At that point the directive will use real logic for determining if the user is logged in or not and behave accordingly. + +::: + +### Improving the Contact Form + +Our contact form works but it has a couple of issues at the moment: + +* Clicking the submit button multiple times will result in multiple submits +* The user has no idea if their submission was successful +* If an error was to occur on the server, we have no way of notifying the user + +Let's address these issues. + +#### Disable Save on Loading + +The `useMutation` hook returns a couple more elements along with the function to invoke it. We can destructure these as the second element in the array that's returned. The two we care about are `loading` and `error`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +// ... + +const ContactPage = () => { + // highlight-next-line + const [create, { loading, error }] = useMutation(CREATE_CONTACT) + + const onSubmit = (data) => { + create({ variables: { input: data } }) + } + + return (...) +} + +// ... +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +// ... 
+ +const ContactPage = () => { + // highlight-next-line + const [create, { loading, error }] = useMutation< + CreateContactMutation, + CreateContactMutationVariables + >(CREATE_CONTACT) + + const onSubmit: SubmitHandler<FormValues> = (data) => { + create({ variables: { input: data } }) + } + + return (...) +} + +// ... +``` + +</TabItem> +</Tabs> + +Now we know if the database call is still in progress by looking at `loading`. An easy fix for our multiple submit issue would be to disable the submit button if the response is still in progress. We can set the `disabled` attribute on the "Save" button to the value of `loading`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +return ( + // ... + // highlight-next-line + <Submit disabled={loading}>Save</Submit> + // ... +) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +return ( + // ... + // highlight-next-line + <Submit disabled={loading}>Save</Submit> + // ... +) +``` + +</TabItem> +</Tabs> + +It may be hard to see a difference in development because the submit is so fast, but you could enable network throttling via the Network tab Chrome's Web Inspector to simulate a slow connection: + +<img src="https://user-images.githubusercontent.com/300/71037869-6dc56f80-20d5-11ea-8b26-3dadb8a1ed86.png" /> + +You'll see that the "Save" button become disabled for a second or two while waiting for the response. + +#### Notification on Save + +Next, let's show a notification to let the user know their submission was successful. Redwood includes [react-hot-toast](https://react-hot-toast.com/) to quickly show a popup notification on a page. + +`useMutation` accepts an options object as a second argument. One of the options is a callback function, `onCompleted`, that will be invoked when the mutation successfully completes. We'll use that callback to invoke a `toast()` function which will add a message to be displayed in a **<Toaster>** component. + +Add the `onCompleted` callback to `useMutation` and include the **<Toaster>** component in our `return`, just before the **<Form>**: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +// highlight-next-line +import { toast, Toaster } from '@redwoodjs/web/toast' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` + +const ContactPage = () => { + // highlight-start + const [create, { loading, error }] = useMutation(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + }, + }) + // highlight-end + + const onSubmit = (data) => { + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + // highlight-next-line + <Toaster /> + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit disabled={loading}>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +// highlight-next-line +import { toast, Toaster } from '@redwoodjs/web/toast' +import { + FieldError, + Form, + Label, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +import { + CreateContactMutation, + CreateContactMutationVariables, +} from 'types/graphql' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + // highlight-start + const [create, { loading, error }] = useMutation< + CreateContactMutation, + CreateContactMutationVariables + >(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + }, + }) + // highlight-end + + const onSubmit: SubmitHandler<FormValues> = (data) => { + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + // highlight-next-line + <Toaster /> + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }}> + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit disabled={loading}>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +![Toast notification on successful submission](https://user-images.githubusercontent.com/300/146271487-f6b77e76-99c1-43e8-bcda-5ba3c9b03137.png) + +You can read the full documentation for Toast [here](../../toast-notifications.md). + +### Displaying Server Errors + +Next we'll inform the user of any server errors. So far we've only notified the user of _client_ errors: a field was missing or formatted incorrectly. But if we have server-side constraints in place `<Form>` can't know about those, but we still need to let the user know something went wrong. + +We have email validation on the client, but any developer worth their silicon knows [never trust the client](https://www.codebyamir.com/blog/never-trust-data-from-the-browser). Let's add the email validation into the api side as well to be sure no bad data gets into our database, even if someone somehow bypassed our client-side validation (l33t hackers do this all the time). + +:::info No server-side validation for some fields? + +Why don't we need server-side validation for the existence of name, email and message? Because GraphQL is already doing that for us! You may remember the `String!` declaration in our SDL file for the `Contact` type: that adds a constraint that those fields cannot be `null` as soon as it arrives on the api side. If it is, GraphQL would reject the request and throw an error back to us on the client. + +However, if you start using one service from within another, there would be no validation! GraphQL is only involved if an "outside" party is making a request (like a browser). If you really want to make sure that a field is present or formatted correctly, you'll need to add validation inside the Service itself. Then, no matter who is calling that service function (GraphQL or another Service) your data is guaranteed to be checked. 
+ +We do have an additional layer of validation for free: because name, email and message were set as required in our `schema.prisma` file, the database itself will prevent any `null`s from being recorded. It's usually recommended to not rely solely on the database for input validation: what format your data should be in is a concern of your business logic, and in a Redwood app the business logic lives in the Services! + +::: + +We talked about business logic belonging in our services files and this is a perfect example. And since validating inputs is such a common requirement, Redwood once again makes our lives easier with [Service Validations](../../services.md#service-validations). + +We'll make a call to a new `validate` function to our `contacts` service, which will do the work of making sure that the `email` field is actually formatted like an email address: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```js title="api/src/services/contacts/contacts.js" +// highlight-next-line +import { validate } from '@redwoodjs/api' + +// ... + +export const createContact = ({ input }) => { + // highlight-next-line + validate(input.email, 'email', { email: true }) + return db.contact.create({ data: input }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/contacts/contacts.ts" +import type { QueryResolvers, MutationResolvers } from 'types/graphql' + +// highlight-next-line +import { validate } from '@redwoodjs/api' + +// ... + +export const createContact: MutationResolvers['createContact'] = ({ input }) => { + // highlight-next-line + validate(input.email, 'email', { email: true }) + return db.contact.create({ data: input }) +} +``` + +</TabItem> +</Tabs> + +That's a lot of references to `email` so let's break them down: + +1. The first argument is the value that we want to check. In this case `input` contains all our contact data and the value of `email` is the one we want to check +2. The second argument is the `name` prop from the `<TextField>`, so that we know which input field on the page has an error +3. The third argument is an object containing the **validation directives** we want to invoke. In this case it's just one, and `email: true` means we want to use the built-in email validator + +So when `createContact` is called it will first validate the inputs and only if no errors are thrown will it continue to actually create the record in the database. + +Right now we won't even be able to test our validation on the server because we're already checking that the input is formatted like an email address with the `validation` prop in `<TextField>`. Let's temporarily remove it so that the bad data will be sent up to the server: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```diff title="web/src/pages/ContactPage/ContactPage.js" + <TextField + name="email" + validation={{ + required: true, +- pattern: { +- value: /^[^@]+@[^.]+\..+$/, +- message: 'Please enter a valid email address', +- }, + }} + errorClassName="error" + /> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```diff title="web/src/pages/ContactPage/ContactPage.tsx" + <TextField + name="email" + validation={{ + required: true, +- pattern: { +- value: /^[^@]+@[^.]+\..+$/, +- message: 'Please enter a valid email address', +- }, + }} + errorClassName="error" + /> +``` + +</TabItem> +</Tabs> + +Remember when we said that `<Form>` had one more trick up its sleeve? Here it comes! 
+ +Add a `<FormError>` component, passing the `error` constant we got from `useMutation` and a little bit of styling to `wrapperStyle` (don't forget the `import`). We'll also pass `error` to `<Form>` so it can setup a context: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +import { toast, Toaster } from '@redwoodjs/web/toast' +import { + FieldError, + Form, + // highlight-next-line + FormError, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) { + createContact(input: $input) { + id + } + } +` + +const ContactPage = () => { + const [create, { loading, error }] = useMutation(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + }, + }) + + const onSubmit = (data) => { + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Toaster /> + // highlight-start + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }} error={error}> + <FormError error={error} wrapperClassName="form-error" /> + // highlight-end + + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit disabled={loading}>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +import { toast, Toaster } from '@redwoodjs/web/toast' +import { + FieldError, + Form, + // highlight-next-line + FormError, + Label, + TextField, + TextAreaField, + Submit, + SubmitHandler, +} from '@redwoodjs/forms' + +import { + CreateContactMutation, + CreateContactMutationVariables, +} from 'types/graphql' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const [create, { loading, error }] = useMutation< + CreateContactMutation, + CreateContactMutationVariables + >(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + }, + }) + + const onSubmit: SubmitHandler<FormValues> = (data) => { + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Toaster /> + // highlight-start + <Form onSubmit={onSubmit} config={{ mode: 'onBlur' }} error={error}> + <FormError error={error} wrapperClassName="form-error" /> + // highlight-end + + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit disabled={loading}>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +Now submit a message with an invalid email address: + +![Email error from the server side](https://user-images.githubusercontent.com/300/158897801-8a3f7ae8-6e67-4fc0-b828-3095c264507e.png) + +We get that error message at the top saying something went wrong in plain English _and_ the actual field is highlighted for us, just like the inline validation! The message at the top may be overkill for such a short form, but it can be key if a form is multiple screens long; the user gets a summary of what went wrong all in one place and they don't have to resort to hunting through a long form looking for red boxes. You don't *have* to use that message box at the top, though; just remove `<FormError>` and the field will still be highlighted as expected. + +:::info + +`<FormError>` has several styling options which are attached to different parts of the message: + +* `wrapperStyle` / `wrapperClassName`: the container for the entire message +* `titleStyle` / `titleClassName`: the "Errors prevented this form..." title +* `listStyle` / `listClassName`: the `<ul>` that contains the list of errors +* `listItemStyle` / `listItemClassName`: each individual `<li>` around each error + +::: + +This just scratches the surface of what Service Validations can do. You can perform more complex validations, including combining multiple directives in a single call. What if we had a model representing a `Car`, and users could submit them to us for sale on our exclusive car shopping site. How do we make sure we only get the cream of the crop of motorized vehicles? 
Service validations would allow us to be very particular about the values someone would be allowed to submit, all without any custom checks, just built-in `validate()` calls: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```js +export const createCar = ({ input }) => { + validate(input.make, 'make', { + inclusion: ['Audi', 'BMW', 'Ferrari', 'Lexus', 'Tesla'], + }) + validate(input.color, 'color', { + exclusion: { in: ['Beige', 'Mauve'], message: "No one wants that color" } + }) + validate(input.hasDamage, 'hasDamage', { + absence: true + }) + validate(input.vin, 'vin', { + format: /[A-Z0-9]+/, + length: { equal: 17 } + }) + validate(input.odometer, 'odometer', { + numericality: { positive: true, lessThanOrEqual: 10000 } + }) + + return db.car.create({ data: input }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts +export const createCar = ({ input }: Car) => { + validate(input.make, 'make', { + inclusion: ['Audi', 'BMW', 'Ferrari', 'Lexus', 'Tesla'], + }) + validate(input.color, 'color', { + exclusion: { in: ['Beige', 'Mauve'], message: "No one wants that color" } + }) + validate(input.hasDamage, 'hasDamage', { + absence: true + }) + validate(input.vin, 'vin', { + format: /[A-Z0-9]+/, + length: { equal: 17 } + }) + validate(input.odometer, 'odometer', { + numericality: { positive: true, lessThanOrEqual: 10000 } + }) + + return db.car.create({ data: input }) +} +``` + +</TabItem> +</Tabs> + +You can still include your own custom validation logic and have the errors handled in the same manner as the built-in validations: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```js +validateWith(() => { + const oneWeekAgo = new Date() + oneWeekAgo.setDate(oneWeekAgo.getDate() - 7) + + if (input.lastCarWashDate < oneWeekAgo) { + throw new Error("We don't accept dirty cars") + } +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```js +validateWith(() => { + const oneWeekAgo = new Date() + oneWeekAgo.setDate(oneWeekAgo.getDate() - 7) + + if (input.lastCarWashDate < oneWeekAgo) { + throw new Error("We don't accept dirty cars") + } +}) +``` + +</TabItem> +</Tabs> + +Now you can be sure you won't be getting some old jalopy! + +### One more thing... + +Since we're not redirecting after the form submits, we should at least clear out the form fields. This requires we get access to a `reset()` function that's part of [React Hook Form](https://react-hook-form.com/), but we don't have access to it with the basic usage of `<Form>` (like we're currently using). + +Redwood includes a hook called `useForm()` (from React Hook Form) which is normally called for us within `<Form>`. In order to reset the form we need to invoke that hook ourselves. But the functionality that `useForm()` provides still needs to be used in `Form`. Here's how we do that. 
+ +First we'll import `useForm`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { + FieldError, + Form, + FormError, + Label, + Submit, + TextAreaField, + TextField, + // highlight-next-line + useForm, +} from '@redwoodjs/forms' +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { + FieldError, + Form, + FormError, + Label, + Submit, + TextAreaField, + TextField, + // highlight-next-line + useForm, +} from '@redwoodjs/forms' +``` + +</TabItem> +</Tabs> + +And now call it inside of our component: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +const ContactPage = () => { + // highlight-next-line + const formMethods = useForm() + //... +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.tsx" +const ContactPage = () => { + // highlight-next-line + const formMethods = useForm() + //... +``` + +</TabItem> +</Tabs> + +Finally we'll tell `<Form>` to use the `formMethods` we just got from `useForm()` instead of doing it itself: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +return ( + <> + <Toaster /> + <Form + onSubmit={onSubmit} + config={{ mode: 'onBlur' }} + error={error} + // highlight-next-line + formMethods={formMethods} + > + // ... +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +return ( + <> + <Toaster /> + <Form + onSubmit={onSubmit} + config={{ mode: 'onBlur' }} + error={error} + // highlight-next-line + formMethods={formMethods} + > + // ... +``` + +</TabItem> +</Tabs> + +Now we can call `reset()` on `formMethods` after we call `toast()`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +// ... + +const [create, { loading, error }] = useMutation(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + // highlight-next-line + formMethods.reset() + }, +}) + +// ... +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +// ... + +const [create, { loading, error }] = useMutation< + CreateContactMutation, + CreateContactMutationVariables +>(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + // highlight-next-line + formMethods.reset() + }, +}) + +// ... +``` + +</TabItem> +</Tabs> + +:::warning + +You can put the email validation back into the `<TextField>` now, but you should leave the server validation in place, just in case. + +::: + +Here's the entire page: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +import { toast, Toaster } from '@redwoodjs/web/toast' +import { + FieldError, + Form, + FormError, + Label, + Submit, + TextAreaField, + TextField, + useForm, +} from '@redwoodjs/forms' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` + +const ContactPage = () => { + const formMethods = useForm() + + const [create, { loading, error }] = useMutation(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + formMethods.reset() + }, + }) + + const onSubmit = (data) => { + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Toaster /> + <Form + onSubmit={onSubmit} + config={{ mode: 'onBlur' }} + error={error} + formMethods={formMethods} + > + <FormError error={error} wrapperClassName="form-error" /> + + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit disabled={loading}>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +import { MetaTags, useMutation } from '@redwoodjs/web' +import { toast, Toaster } from '@redwoodjs/web/toast' +import { + FieldError, + Form, + FormError, + Label, + Submit, + SubmitHandler, + TextAreaField, + TextField, + useForm, +} from '@redwoodjs/forms' + +import { + CreateContactMutation, + CreateContactMutationVariables, +} from 'types/graphql' + +const CREATE_CONTACT = gql` + mutation CreateContactMutation($input: CreateContactInput!) 
{ + createContact(input: $input) { + id + } + } +` + +interface FormValues { + name: string + email: string + message: string +} + +const ContactPage = () => { + const formMethods = useForm() + + const [create, { loading, error }] = useMutation< + CreateContactMutation, + CreateContactMutationVariables + >(CREATE_CONTACT, { + onCompleted: () => { + toast.success('Thank you for your submission!') + formMethods.reset() + }, + }) + + const onSubmit: SubmitHandler<FormValues> = (data) => { + create({ variables: { input: data } }) + } + + return ( + <> + <MetaTags title="Contact" description="Contact page" /> + + <Toaster /> + <Form + onSubmit={onSubmit} + config={{ mode: 'onBlur' }} + error={error} + formMethods={formMethods} + > + <FormError error={error} wrapperClassName="form-error" /> + + <Label name="name" errorClassName="error"> + Name + </Label> + <TextField + name="name" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="name" className="error" /> + + <Label name="email" errorClassName="error"> + Email + </Label> + <TextField + name="email" + validation={{ + required: true, + pattern: { + value: /^[^@]+@[^.]+\..+$/, + message: 'Please enter a valid email address', + }, + }} + errorClassName="error" + /> + <FieldError name="email" className="error" /> + + <Label name="message" errorClassName="error"> + Message + </Label> + <TextAreaField + name="message" + validation={{ required: true }} + errorClassName="error" + /> + <FieldError name="message" className="error" /> + + <Submit disabled={loading}>Save</Submit> + </Form> + </> + ) +} + +export default ContactPage +``` + +</TabItem> +</Tabs> + +That's it! [React Hook Form](https://react-hook-form.com/) provides a bunch of [functionality](https://react-hook-form.com/docs) that `<Form>` doesn't expose. When you want to get to that functionality you can call `useForm()` yourself, but make sure to pass the returned object (we called it `formMethods`) as a prop to `<Form>` so that the validation and other functionality keeps working. + +:::info + +You may have noticed that the onBlur form config stopped working once you started calling `useForm()` yourself. That's because Redwood calls `useForm()` behind the scenes and automatically passes it the `config` prop that you gave to `<Form>`. Redwood is no longer calling `useForm()` for you so if you need some options passed you need to do it manually: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/pages/ContactPage/ContactPage.jsx" +const ContactPage = () => { + const formMethods = useForm({ mode: 'onBlur' }) + //... +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/pages/ContactPage/ContactPage.tsx" +const ContactPage = () => { + const formMethods = useForm({ mode: 'onBlur' }) + //... +``` + +</TabItem> +</Tabs> + +::: + +The public site is looking pretty good. How about the administrative features that let us create and edit posts? We should move them to some kind of admin section and put them behind a login so that random users poking around at URLs can't create ads for discount pharmaceuticals. 
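+
+:::tip One more `formMethods` trick
+
+`reset()` is just one of the functions that `useForm()` hands back: anything else in [React Hook Form's API](https://react-hook-form.com/docs) is available the same way. As a quick, hypothetical sketch (not something the rest of the tutorial depends on), you could use `watch()` to react to a field's value as the user types:
+
+```jsx title="web/src/pages/ContactPage/ContactPage.jsx"
+const ContactPage = () => {
+  const formMethods = useForm({ mode: 'onBlur' })
+  // re-renders whenever the "name" field changes, so you could show a live preview
+  const name = formMethods.watch('name')
+  //...
+```
+
+:::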
diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter4/authentication.md b/docs/versioned_docs/version-7.0/tutorial/chapter4/authentication.md new file mode 100644 index 000000000000..c42b3fb7d393 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter4/authentication.md @@ -0,0 +1,899 @@ +# Authentication + +## An Admin Section + +Having the admin screens at `/admin` is a reasonable thing to do. Let's update the routes to make that happen by updating the four routes where the URL begins with `/posts` to start with `/admin/posts` instead: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +import { Router, Route, Set } from '@redwoodjs/router' +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +const Routes = () => { + return ( + <Router> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + // highlight-start + <Route path="/admin/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/admin/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/admin/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/admin/posts" page={PostPostsPage} name="posts" /> + // highlight-end + </Set> + <Set wrap={BlogLayout}> + <Route path="/article/{id:Int}" page={ArticlePage} name="article" /> + <Route path="/contact" page={ContactPage} name="contact" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/Routes.tsx" +import { Router, Route, Set } from '@redwoodjs/router' +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +const Routes = () => { + return ( + <Router> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + // highlight-start + <Route path="/admin/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/admin/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/admin/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/admin/posts" page={PostPostsPage} name="posts" /> + // highlight-end + </Set> + <Set wrap={BlogLayout}> + <Route path="/article/{id:Int}" page={ArticlePage} name="article" /> + <Route path="/contact" page={ContactPage} name="contact" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +</Tabs> + +Head to [http://localhost:8910/admin/posts](http://localhost:8910/admin/posts) and our generated scaffold page should come up. Thanks to named routes we don't have to update any of the `<Link>`s that were generated by the scaffolds since the `name`s of the pages didn't change! + +Having the admin at a different path is great, but nothing is stopping someone from just browsing to that new path and messing with our blog posts. How do we keep prying eyes away? + +## Authentication + +"Authentication" is a blanket term for all of the stuff that goes into making sure that a user, often identified with an email address and password, is allowed to access something. 
Authentication can be [famously fickle](https://www.rdegges.com/2017/authentication-still-sucks/) to do right both from a technical and developer-happiness standpoint. + +"Credentials" are the pieces of information a user provides to prove they are who they say they are: commonly a username (usually email) and password. + +Redwood includes two authentication paths out of the box: + +* Self-hosted, where user credentials are stored in your own database +* Third-party hosted, where user credentials are stored with the third party + +In both cases you end up with an authenticated user that you can access in both the web and api sides of your app. + +Redwood includes [integrations](../../authentication.md) for several of the most popular third-party auth providers: + +- [Auth0](https://auth0.com/) +- [Clerk](https://clerk.dev/) +- [Netlify Identity](https://docs.netlify.com/visitor-access/identity/) +- [Firebase's GoogleAuthProvider](https://firebase.google.com/docs/reference/js/v8/firebase.auth.GoogleAuthProvider) +- [Supabase](https://supabase.io/docs/guides/auth) +- [SuperTokens](https://supertokens.com) + +As for our blog, we're going to use self-hosted authentication (named *dbAuth* in Redwood) since it's the simplest to get started with and doesn't involve any third party signups. + +:::info Authentication vs. Authorization + +There are two terms which contain a lot of letters, starting with an "A" and ending in "ation" (which means you could rhyme them if you wanted to) that become involved in most discussions about login: + +* Authentication +* Authorization + +Here is how Redwood uses these terms: + +* **Authentication** deals with determining whether someone is who they say they are, generally by "logging in" with an email and password, or a third party provider like Auth0. +* **Authorization** is whether a user (who has usually already been authenticated) is allowed to do something they want to do. This generally involves some combination of roles and permission checking before allowing access to a URL or feature of your site. + +This section of the tutorial focuses on **Authentication** only. See [chapter 7 of the tutorial](../chapter7/rbac.md) to learn about Authorization in Redwood. + +::: + +## Auth Setup + +As you probably have guessed, Redwood has a couple of generators to get you going. One installs the backend components needed for dbAuth, the other creates login, signup and forgot password pages. + +Run this setup command to get the internals of dbAuth added to our app: + +```bash +yarn rw setup auth dbAuth +``` + +When prompted to "Enable WebAuthn support", pick no—this is a separate piece of functionality we won't need for the tutorial. You'll see that the process creates several files and includes some post-install instructions for the last couple of customizations you'll need to make. Let's go through them now. + +### Create a User Model + +First we'll need to add a couple of fields to our `User` model. We don't even have a `User` model yet, so we'll create one along with the required fields at the same time. 
+ +Open up `schema.prisma` and add: + +```javascript title="api/db/schema.prisma" +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + binaryTargets = "native" +} + +model Post { + id Int @id @default(autoincrement()) + title String + body String + createdAt DateTime @default(now()) +} + +model Contact { + id Int @id @default(autoincrement()) + name String + email String + message String + createdAt DateTime @default(now()) +} + +// highlight-start +model User { + id Int @id @default(autoincrement()) + name String? + email String @unique + hashedPassword String + salt String + resetToken String? + resetTokenExpiresAt DateTime? +} +// highlight-end +``` + +This gives us a user with a name and email, as well as four fields that dbAuth will control: + +* **hashedPassword**: stores the result of combining the user's password with a `salt` and then [hashed](https://searchsqlserver.techtarget.com/definition/hashing) +* **salt**: a unique string that combines with the hashedPassword to prevent [rainbow table attacks](https://dev.to/salothom/rainbow-tables-why-to-add-salt-45l9) +* **resetToken**: if the user forgets their password, dbAuth inserts a token in here that must be present when the user returns to reset their password +* **resetTokenExpiresAt**: a timestamp after which the `resetToken` will be considered expired and no longer valid (the user will need to fill out the forgot password form again) + +Let's create the user model by migrating the database, naming it something like "create user": + +```bash +yarn rw prisma migrate dev +``` + +That's it for the database setup! + +## Private Routes + +Try reloading the Posts admin and we'll see something that's 50% correct: + +![image](https://user-images.githubusercontent.com/300/146462761-d21c93f0-289a-4e11-bccf-8e4e68f21438.png) + +Going to the admin section now prevents a non-logged in user from seeing posts, great! This is the result of the `@requireAuth` directive in `api/src/graphql/posts.sdl.{js,ts}`: you're not authenticated so GraphQL will not respond to your request for data. But, ideally they wouldn't be able to see the admin pages themselves. 
Let's fix that with a new component in the Routes file, `<PrivateSet>`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +// highlight-next-line +import { PrivateSet, Router, Route, Set } from '@redwoodjs/router' + +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +import { useAuth } from './auth' + +const Routes = () => { + return ( + <Router useAuth={useAuth}> + // highlight-next-line + <PrivateSet unauthenticated="home"> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/admin/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/admin/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/admin/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/admin/posts" page={PostPostsPage} name="posts" /> + </Set> + // highlight-next-line + </PrivateSet> + <Set wrap={BlogLayout}> + <Route path="/article/{id:Int}" page={ArticlePage} name="article" /> + <Route path="/contact" page={ContactPage} name="contact" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/Routes.tsx" +// highlight-next-line +import { PrivateSet, Router, Route, Set } from '@redwoodjs/router' + +import ScaffoldLayout from 'src/layouts/ScaffoldLayout' +import BlogLayout from 'src/layouts/BlogLayout' + +import { useAuth } from './auth' + +const Routes = () => { + return ( + <Router useAuth={useAuth}> + // highlight-next-line + <PrivateSet unauthenticated="home"> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/admin/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/admin/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/admin/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/admin/posts" page={PostPostsPage} name="posts" /> + </Set> + // highlight-next-line + </PrivateSet> + <Set wrap={BlogLayout}> + <Route path="/article/{id:Int}" page={ArticlePage} name="article" /> + <Route path="/contact" page={ContactPage} name="contact" /> + <Route path="/about" page={AboutPage} name="about" /> + <Route path="/" page={HomePage} name="home" /> + </Set> + <Route notfound page={NotFoundPage} /> + </Router> + ) +} + +export default Routes +``` + +</TabItem> +</Tabs> + +We wrap the routes we want to be private (that is, only accessible when logged in) in the `<PrivateSet>` component, and tell our app where to send them if they are unauthenticated. In this case they should go to the `home` route. + +Try going back to [http://localhost:8910/admin/posts](http://localhost:8910/admin/posts) now and—yikes! + +![Homepage showing user does not have permission to view](https://user-images.githubusercontent.com/300/146463430-f7bc7fc9-a966-4149-9cb6-382d89d9d636.png) + +Well, we couldn't get to the admin pages, but we also can't see our blog posts any more. Do you know why we're seeing the same message here that we saw in the posts admin page? + +It's because the `posts` query in `posts.sdl.{js,ts}` is used by both the homepage *and* the posts admin page. Since it has the `@requireAuth` directive, it's locked down and can only be accessed when logged in. 
But we *do* want people that aren't logged in to be able to view the posts on the homepage! + +Now that our admin pages are behind a `<PrivateSet>` route, what if we set the `posts` query to be `@skipAuth` instead? Let's try: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/posts.sdl.js" +export const schema = gql` + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + } + + type Query { + // highlight-next-line + posts: [Post!]! @skipAuth + post(id: Int!): Post @requireAuth + } + + input CreatePostInput { + title: String! + body: String! + } + + input UpdatePostInput { + title: String + body: String + } + + type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth + deletePost(id: Int!): Post! @requireAuth + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/posts.sdl.ts" +export const schema = gql` + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + } + + type Query { + // highlight-next-line + posts: [Post!]! @skipAuth + post(id: Int!): Post @requireAuth + } + + input CreatePostInput { + title: String! + body: String! + } + + input UpdatePostInput { + title: String + body: String + } + + type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth + deletePost(id: Int!): Post! @requireAuth + } +` +``` + +</TabItem> +</Tabs> + +Reload the homepage and: + +![image](https://user-images.githubusercontent.com/300/146463788-7ab8afbb-8cd8-4c16-b8d2-02a00bcd7b46.png) + +They're back! Let's just check that if we click on one of our posts that we can see it... UGH: + +![image](https://user-images.githubusercontent.com/300/146463841-cb9c95b6-3cc8-4697-9056-97fdebb49c51.png) + +This page shows a single post, using the `post` query, not `posts`! So, we need to `@skipAuth` on that one as well: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/posts.sdl.js" +export const schema = gql` + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + } + + type Query { + posts: [Post!]! @skipAuth + // highlight-next-line + post(id: Int!): Post @skipAuth + } + + input CreatePostInput { + title: String! + body: String! + } + + input UpdatePostInput { + title: String + body: String + } + + type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth + deletePost(id: Int!): Post! @requireAuth + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/posts.sdl.ts" +export const schema = gql` + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + } + + type Query { + posts: [Post!]! @skipAuth + // highlight-next-line + post(id: Int!): Post @skipAuth + } + + input CreatePostInput { + title: String! + body: String! + } + + input UpdatePostInput { + title: String + body: String + } + + type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth + deletePost(id: Int!): Post! @requireAuth + } +` +``` + +</TabItem> +</Tabs> + +Cross your fingers and reload! 
+
+![image](https://user-images.githubusercontent.com/300/146463959-c59c8721-484f-45de-a663-e6ab3b2591dc.png)
+
+We're back in business! Once you add authentication into your app you'll probably run into several situations like this where you need to go back and forth, re-allowing access to some pages or queries that inadvertently got locked down by default. Remember, Redwood is secure by default—we'd rather you accidentally expose too *little* of your app than too *much*!
+
+Now that our pages are behind login, let's actually create a login page so that we can see them again.
+
+:::info Skipping auth altogether for `posts` and `post` feels bad somehow...
+
+Ahh, good eye. While posts don't currently expose any particularly secret information, what if we eventually add a field like `publishStatus` where you could mark a post as `draft` so that it doesn't show on the homepage? But, if you knew enough about GraphQL, you could easily request all posts in the database and be able to read all the drafts!
+
+It would be more future-proof to create a *new* endpoint for public display of posts, something like `publicPosts` and `publicPost` that will have built-in logic to only ever return a minimal amount of data and leave the default `posts` and `post` queries returning all the data for a post, something that only the admin will have access to. (Or do the opposite: keep `posts` and `post` as public and create new `adminPosts` and `adminPost` endpoints that can contain sensitive information.)
+
+:::
+
+## Login & Signup Pages
+
+Yet another generator is here for you, this time one that will create login, signup and forgot password pages:
+
+```bash
+yarn rw g dbAuth
+```
+
+Again several pages will be created and some post-install instructions will describe next steps. But for now, try going to [http://localhost:8910/login](http://localhost:8910/login):
+
+![Generated login page](https://user-images.githubusercontent.com/300/146464693-a8fc4cf9-7fed-474f-8335-bb4c80fe0a5e.png)
+
+That was easy! We don't have a user to log in with, so try going to the signup page instead (there's a link under the Login button, or just head to [http://localhost:8910/signup](http://localhost:8910/signup)):
+
+![Generated signup page](https://user-images.githubusercontent.com/300/146464785-a5996b19-27c5-493c-8fb3-1c753add31a6.png)
+
+dbAuth defaults to the generic "Username" for the first field, but in our case the username will be an email address (we can change that label in a moment). Create yourself a user with email and password:
+
+![image](https://user-images.githubusercontent.com/300/146464870-cb859f8b-175f-4170-8da4-5286facd1fe5.png)
+
+And after clicking "Signup" you should end up back on the homepage, where everything looks the same! Yay? But now try going to [http://localhost:8910/admin/posts](http://localhost:8910/admin/posts):
+
+![Posts admin](https://user-images.githubusercontent.com/300/146465485-c169a4b8-f398-47ec-8412-4fc15a666976.png)
+
+Awesome! Signing up will automatically log you in (although this behavior [can be changed](../../auth/dbauth.md#signuphandler)) and if you look in the code for the `SignupPage` you'll see where the redirect to the homepage takes place (hint: check out line 21).
+
+## Add a Logout Link
+
+Now that we're logged in, how do we log out? Let's add a link to the `BlogLayout` so that it's present on all pages, and also include an indicator of who you're actually logged in as.
+ +Redwood provides a [hook](../../authentication.md#destructuring-the-useauth-hook) `useAuth` which we can use in our components to determine the state of the user's login-ness, get their user info, and more. In `BlogLayout` we want to destructure the `isAuthenticated`, `currentUser` and `logOut` properties from `useAuth()`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.jsx" +import { Link, routes } from '@redwoodjs/router' + +// highlight-next-line +import { useAuth } from 'src/auth' + +const BlogLayout = ({ children }) => { + // highlight-next-line + const { isAuthenticated, currentUser, logOut } = useAuth() + + return ( + <> + <header> + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + <nav> + <ul> + <li> + <Link to={routes.home()}>Home</Link> + </li> + <li> + <Link to={routes.about()}>About</Link> + </li> + <li> + <Link to={routes.contact()}>Contact</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.tsx" +import { Link, routes } from '@redwoodjs/router' + +// highlight-next-line +import { useAuth } from 'src/auth' + +type BlogLayoutProps = { + children?: React.ReactNode +} + +const BlogLayout = ({ children }: BlogLayoutProps) => { + // highlight-next-line + const { isAuthenticated, currentUser, logOut } = useAuth() + + return ( + <> + <header> + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + <nav> + <ul> + <li> + <Link to={routes.home()}>Home</Link> + </li> + <li> + <Link to={routes.about()}>About</Link> + </li> + <li> + <Link to={routes.contact()}>Contact</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +</Tabs> + +As you can probably tell by the names: + +* **isAuthenticated**: a boolean as to whether or not a user is logged in +* **currentUser**: any details the app has on that user (more on this in a moment) +* **logOut**: removes the user's session and logs them out + +At the top right of the page, let's show the email address of the user (if they're logged in) as well as a link to log out. If they're not logged in, let's show a link to do just that: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.jsx" +import { Link, routes } from '@redwoodjs/router' + +import { useAuth } from 'src/auth' + +const BlogLayout = ({ children }) => { + const { isAuthenticated, currentUser, logOut } = useAuth() + + return ( + <> + <header> + // highlight-next-line + <div className="flex-between"> + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + // highlight-start + {isAuthenticated ? 
( + <div> + <span>Logged in as {currentUser.email}</span>{' '} + <button type="button" onClick={logOut}> + Logout + </button> + </div> + ) : ( + <Link to={routes.login()}>Login</Link> + )} + </div> + // highlight-end + <nav> + <ul> + <li> + <Link to={routes.home()}>Home</Link> + </li> + <li> + <Link to={routes.about()}>About</Link> + </li> + <li> + <Link to={routes.contact()}>Contact</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.tsx" +import { Link, routes } from '@redwoodjs/router' + +import { useAuth } from 'src/auth' + +type BlogLayoutProps = { + children?: React.ReactNode +} + +const BlogLayout = ({ children }: BlogLayoutProps) => { + const { isAuthenticated, currentUser, logOut } = useAuth() + + return ( + <> + <header> + // highlight-next-line + <div className="flex-between"> + <h1> + <Link to={routes.home()}>Redwood Blog</Link> + </h1> + // highlight-start + {isAuthenticated ? ( + <div> + <span>Logged in as {currentUser.email}</span>{' '} + <button type="button" onClick={logOut}> + Logout + </button> + </div> + ) : ( + <Link to={routes.login()}>Login</Link> + )} + </div> + // highlight-end + <nav> + <ul> + <li> + <Link to={routes.home()}>Home</Link> + </li> + <li> + <Link to={routes.about()}>About</Link> + </li> + <li> + <Link to={routes.contact()}>Contact</Link> + </li> + </ul> + </nav> + </header> + <main>{children}</main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/146466685-cd91d9e6-e341-4698-81a6-cc404d6b3098.png) + +Well, it's almost right! Where's our email address? By default, the function that determines what's in `currentUser` only returns that user's `id` field for security reasons (better to expose too little than too much, remember!). 
To add email to that list, check out `api/src/lib/auth.{js,ts}`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/lib/auth.js" +import { AuthenticationError, ForbiddenError } from '@redwoodjs/graphql-server' +import { db } from './db' + +export const getCurrentUser = async (session) => { + return await db.user.findUnique({ + where: { id: session.id }, + select: { id: true }, + }) +} + +export const isAuthenticated = () => { + return !!context.currentUser +} + +export const hasRole = (roles) => { + if (!isAuthenticated()) { + return false + } + + const currentUserRoles = context.currentUser?.roles + + if (typeof roles === 'string') { + if (typeof currentUserRoles === 'string') { + // roles to check is a string, currentUser.roles is a string + return currentUserRoles === roles + } else if (Array.isArray(currentUserRoles)) { + // roles to check is a string, currentUser.roles is an array + return currentUserRoles?.some((allowedRole) => roles === allowedRole) + } + } + + if (Array.isArray(roles)) { + if (Array.isArray(currentUserRoles)) { + // roles to check is an array, currentUser.roles is an array + return currentUserRoles?.some((allowedRole) => + roles.includes(allowedRole) + ) + } else if (typeof currentUserRoles === 'string') { + // roles to check is an array, currentUser.roles is a string + return roles.some((allowedRole) => currentUserRoles === allowedRole) + } + } + + // roles not found + return false +} + +export const requireAuth = ({ roles } = {}) => { + if (!isAuthenticated()) { + throw new AuthenticationError("You don't have permission to do that.") + } + + if (roles && !hasRole(roles)) { + throw new ForbiddenError("You don't have access to do that.") + } +} +``` +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/lib/auth.ts" +import { AuthenticationError, ForbiddenError } from '@redwoodjs/graphql-server' +import { db } from './db' + +import type { DbAuthSession } from '@redwoodjs/api' + +export const getCurrentUser = async (session: DbAuthSession<number>) => { + return await db.user.findUnique({ + where: { id: session.id }, + select: { id: true }, + }) +} + +export const isAuthenticated = (): boolean => { + return !!context.currentUser +} + +type AllowedRoles = string | string[] | undefined + +export const hasRole = (roles: AllowedRoles): boolean => { + if (!isAuthenticated()) { + return false + } + + const currentUserRoles = context.currentUser?.roles + + if (typeof roles === 'string') { + if (typeof currentUserRoles === 'string') { + // roles to check is a string, currentUser.roles is a string + return currentUserRoles === roles + } else if (Array.isArray(currentUserRoles)) { + // roles to check is a string, currentUser.roles is an array + return currentUserRoles?.some((allowedRole) => roles === allowedRole) + } + } + + if (Array.isArray(roles)) { + if (Array.isArray(currentUserRoles)) { + // roles to check is an array, currentUser.roles is an array + return currentUserRoles?.some((allowedRole) => + roles.includes(allowedRole) + ) + } else if (typeof currentUserRoles === 'string') { + // roles to check is an array, currentUser.roles is a string + return roles.some((allowedRole) => currentUserRoles === allowedRole) + } + } + + // roles not found + return false +} + +export const requireAuth = ({ roles }: { roles?: AllowedRoles } = {}) => { + if (!isAuthenticated()) { + throw new AuthenticationError("You don't have permission to do that.") + } + + if (roles && !hasRole(roles)) { + throw new 
ForbiddenError("You don't have access to do that.") + } +} +``` +</TabItem> +</Tabs> + +The `getCurrentUser()` function is where the magic happens: whatever is returned by this function is the content of `currentUser`, in both the web and api sides! In the case of dbAuth, the single argument passed in, `session`, contains the `id` of the user that's logged in. It then looks up the user in the database with Prisma, selecting just the `id`. Let's add `email` to this list: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/lib/auth.js" +export const getCurrentUser = async (session) => { + return await db.user.findUnique({ + where: { id: session.id }, + // highlight-next-line + select: { id: true, email: true}, + }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/lib/auth.ts" +export const getCurrentUser = async (session) => { + return await db.user.findUnique({ + where: { id: session.id }, + // highlight-next-line + select: { id: true, email: true}, + }) +} +``` + +</TabItem> +</Tabs> + +Now our email should be present at the upper right on the homepage: + +![image](https://user-images.githubusercontent.com/300/146467129-c0446c1a-3648-4787-9675-d66eb80b8ab6.png) + +Before we leave this file, take a look at `requireAuth()`. Remember when we talked about the `@requireAuth` directive and how when we first installed authentication we saw the message "You don't have permission to do that."? This is where that came from! + +## Session Secret + +After the initial `setup` command, which installed dbAuth, you may have noticed that an edit was made to the `.env` file in the root of your project. The `setup` script appended a new ENV var called `SESSION_SECRET` along with a big random string of numbers and letters. This is the encryption key for the cookies that are stored in the user's browser when they log in. This secret should never be shared, never checked into your repo, and should be re-generated for each environment you deploy to. + +You can generate a new value with the `yarn rw g secret` command. It only outputs it to the terminal, you'll need to copy/paste to your `.env` file. Note that if you change this secret in a production environment, all users will be logged out on their next request because the cookie they currently have cannot be decrypted with the new key! They'll need to log in again to a new cookie encrypted with the new key. + +## Wrapping Up + +Believe it or not, that's pretty much it for authentication! You can use the combination of `@requireAuth` and `@skipAuth` directives to lock down access to GraphQL query/mutations, and the `<PrivateSet>` component to restrict access to entire pages of your app. If you only want to restrict access to certain components, or certain parts of a component, you can always get `isAuthenticated` from the `useAuth()` hook and then render one thing or another. + +Head over to the Redwood docs to read more about [self-hosted](../../auth/dbauth.md) and [third-party authentication](../../authentication.md#official-integrations). + +## One More Thing + +Remember the GraphQL Playground exercise at the end of [Creating a Contact](../chapter3/saving-data.md#creating-a-contact)? Try to run that again now that authentication is in place and you should get that error we've been talking about because of the `@requireAuth` directive! But, creating a *new* contact should still work just fine (because we're using `@skipAuth` on that mutation). 
+ +However, simulating a logged-in user through the GraphQL Playground is no picnic. But, we're working on improving the experience! diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter4/deployment.md b/docs/versioned_docs/version-7.0/tutorial/chapter4/deployment.md new file mode 100644 index 000000000000..bf7376195eb3 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter4/deployment.md @@ -0,0 +1,190 @@ +# Deployment + +The whole reason we started building Redwood was to make full-stack web apps easier to build and deploy on the Jamstack. While technically we already deployed in the previous section, it doesn't actually work yet. Let's fix that. + +### Git + +Remember at the start of the tutorial when we said that you didn't *really* need to use git if you didn't want to? Well, if you want to follow along with this deploy, you'll need to start using it now. Sorry! Commit your changes and push up to GitHub, GitLab or Bitbucket if you want to continue to follow along. Need a git primer? The most concise one we've seen is to simply create a new repo on GitHub. You'll be shown the list of commands necessary to get your local code committed and pushed up: + +![image](https://user-images.githubusercontent.com/300/152596271-7921c9dc-fe83-4827-b7e4-2740e826fb42.png) + +But instead of just `git add README.md` use `git add .` since you've got an entire codebase ready to go. + +### The Database + +We'll need a database somewhere on the internet to store our data. We've been using SQLite locally, but the kind of deployment we're going to do doesn't have a persistent disk store that we can put SQLite's file-based database on. So, for this part of this tutorial, we will use Postgres. (Prisma currently supports SQLite, Postgres, MySQL and SQL Server.) Don't worry if you aren't familiar with Postgres, Prisma will do all the heavy lifting. We just need to get a database available to the outside world so it can be accessed by our app. + +:::danger + +Prisma only supports one database provider at a time, and since we can't use SQLite in production and *must* switch to Postgres or MySQL, that means we need to use the same database on our local development system after making this change. See our [Local Postgres Setup](../../local-postgres-setup.md) guide to get you started. + +::: + +There are several hosting providers where you can quickly start up a Postgres instance: + +- [Railway](https://railway.app/) +- [Heroku](https://www.heroku.com/postgres) +- [Digital Ocean](https://www.digitalocean.com/products/managed-databases) +- [AWS](https://aws.amazon.com/rds/postgresql/) + +We're going to go with Railway for now because it's a) free and b) ridiculously easy to get started, by far the easiest we've found. You don't even need to create a login! The only limitation is that if you *don't* create an account, your database will be removed after one day. If you think you can finish everything you need to do in the next 24 hours, go for it! Otherwise just create an account first and it'll stick around. + +Head over to Railway and click **Start a New Project**: + +![image](https://user-images.githubusercontent.com/300/152593861-3063732c-b459-4ee9-86ee-e00b28c003fb.png) + +And then Provision PostgreSQL: + +![image](https://user-images.githubusercontent.com/300/152593907-1f8b599e-b4fb-4930-a841-866505e3b79d.png) + +And believe it or not, we're done! Now we just need the connection URL. Click on **PostgreSQL** at the left, and then the **Connect** tab. 
Copy the **Postgres Connection URL**, the one that starts with `postgresql://`:
+
+![image](https://user-images.githubusercontent.com/300/107562577-da7eb180-6b94-11eb-8731-e86a1c7127af.png)
+
+### Change Database Provider
+
+We need to let Prisma know that we intend to use Postgres instead of SQLite from now on. Update the `provider` entry in `schema.prisma`:
+
+```javascript
+provider = "postgresql"
+```
+
+### Recreate Migrations
+
+We will need to re-create our database migrations in a Postgres-compatible format. First, we need to tell Prisma where our new database lives so that it can access it from our dev environment. Open up `.env`, uncomment the `DATABASE_URL` var, update it to the URL you copied from Railway, and save.
+
+:::info
+
+Note that `.env` is not checked into git by default, and should not be checked in under any circumstances! This file will be used to contain any secrets that your codebase needs (like database URLs and API keys) that should never be seen publicly. If you were to check this file into your repo, and your repo was public, anyone on the internet could see your secret stuff!
+
+The `.env.defaults` file is meant for other environment variables (like non-sensitive config options for libraries, log levels, etc.) that are safe to be seen by the public and is meant to be checked into your repo and shared with other devs.
+
+:::
+
+Next, delete the `api/db/migrations` folder completely.
+
+Finally, run:
+
+```bash
+yarn rw prisma migrate dev
+```
+
+All of the changes we made will be consolidated into a single, new migration file and applied to the Railway database instance. You can name this one something like "initial schema".
+
+That's it for the database setup! Now to let Netlify know about it.
+
+### Netlify
+
+So the database is settled, but we need to actually put our code on the internet somewhere. That's where Netlify comes in.
+
+Before we set up Netlify we'll need to set up our code with a setup command. Setup!
+
+```bash
+yarn rw setup deploy netlify
+```
+
+This adds a `netlify.toml` config file in the root of the project that is good to go as-is, but you can tweak it as your app grows (check out the comments at the top of the file for links to resources about customizing). Make sure you commit and push up these code changes to your repo.
+
+And with that, we're ready to set up Netlify itself.
+
+:::warning
+While you may be tempted to use the [Netlify CLI](https://cli.netlify.com) commands to [build](https://cli.netlify.com/commands/build) and [deploy](https://cli.netlify.com/commands/deploy) your project directly from your local project directory, doing so **will lead to errors when deploying and/or when running functions**. That is, you'll see errors in the function needed for the GraphQL server, but also in other serverless functions.
+
+The main reason for this is that these Netlify CLI commands simply build and deploy -- they build your project locally and then push the dist folder. That means that when building a RedwoodJS project, the [Prisma client is generated with binaries matching the operating system at build time](https://cli.netlify.com/commands/link) -- and not the [OS that runs functions on Netlify](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#binarytargets-options). Your Prisma client engine may be `darwin` for OSX or `windows` for Windows, but it needs to be `debian-openssl-1.1.x` or `rhel-openssl-1.1.x`. If the client is incompatible, your functions will fail.
+
+Therefore, **please follow the instructions below** to sync your GitHub (or other compatible source control service) repository with Netlify and allow their build and deploy system to manage deployments.
+:::
+
+#### Signup
+
+[Create a Netlify account](https://app.netlify.com/signup) if you don't have one already. Once you've signed up and verified your email, just click the **New site from Git** button at the upper right:
+
+![Netlify New Site picker](https://user-images.githubusercontent.com/300/73697486-85f84a80-4693-11ea-922f-0f134a3e9031.png)
+
+Now just authorize Netlify to connect to your git hosting provider and find your repo. When the deploy settings come up you can leave everything as the defaults and click **Deploy site**.
+
+Netlify will start building your app and it will eventually say the deployment failed. Why? We haven't told it where to find our database yet!
+
+#### Environment Variables
+
+Go back to the main site page, then to **Site settings** at the top, and then **Environment variables**. Click **Add a Variable**: this is where we'll paste the database connection URI we got from Railway (note the **Key** is "DATABASE_URL"). After pasting the value, append `?connection_limit=1` to the end. The URI will have the following format: `postgresql://<user>:<pass>@<url>/<db>?connection_limit=1`. The default values for Scopes and Values can be left as is. Click **Create variable** to proceed.
+
+:::tip
+
+This connection limit setting is [recommended by Prisma](https://www.prisma.io/docs/guides/performance-and-optimization/connection-management#recommended-connection-pool-size-1) when working with relational databases in a serverless context.
+:::
+
+We'll need to add one more environment variable, `SESSION_SECRET`, which contains a big long string that's used to encrypt the session cookies for dbAuth. This was included in development when you installed dbAuth, but now we need to tell Netlify about it. If you look in your `.env` file you'll see it at the bottom, but we want to create a unique one for every environment we deploy to (each developer should have a unique one as well). We've got a CLI command to create a new one:
+
+```bash
+yarn rw g secret
+```
+
+Copy that over to Netlify along with `DATABASE_URL`:
+
+![Adding ENV var](https://user-images.githubusercontent.com/2931245/204148740-f8aaa276-e9b1-4ffc-a842-7602a1e0111a.png)
+
+#### IT'S ALIVE
+
+Now go over to the **Deploys** tab in the top nav and open the **Trigger deploy** dropdown on the right, then finally choose **Deploy site**:
+
+![Trigger deploy](https://user-images.githubusercontent.com/300/83187760-835aae80-a0e3-11ea-9733-ff54969bba1f.png)
+
+With a little luck (and SCIENCE) it will complete successfully! You can click the **Preview** button at the top of the deploy log page, or go back and click the URL of your Netlify site towards the top:
+
+![Netlify URL](https://user-images.githubusercontent.com/300/83187909-bef57880-a0e3-11ea-97dc-e557248acd3a.png)
+
+:::info
+
+If you view a deploy via the **Preview** button, notice that the URL contains a hash of the latest commit. Netlify will create one of these for every push to `main` but it will only ever show this exact commit, so if you deploy again and refresh you won't see any changes. The real URL for your site (the one you get from your site's homepage in Netlify) will always show the latest successful deploy. See [Branch Deploys](#branch-deploys) below for more info.
+
+:::
+
+Did it work?
If you see "Empty" under the About and Contact links then it did! Yay! You're seeing "Empty" because you don't have any posts in your brand new production database so head to `/admin/posts` and create a couple, then go back to the homepage to see them. + +If the deploy failed, check the log output in Netlify and see if you can make sense of the error. If the deploy was successful but the site doesn't come up, try opening the web inspector and look for errors. Are you sure you pasted the entire Postgres connection string correctly? If you're really, really stuck head over to the [Redwood Community](https://community.redwoodjs.com) and ask for help. + +#### Custom Subdomain + +You can customize the subdomain that your site is published at (who wants to go to `agitated-mongoose-849e99.netlify.app`??) by going to **Site Settings > Domain Management > Domains > Custom Domains**. Open up the **Options** menu and select **Edit site name**. Your site should be available at your custom subdomain (`redwood-tutorial.netlify.app` is much nicer) almost immediately. + +![image](https://user-images.githubusercontent.com/300/154521450-ee64c77c-e658-4045-9dd6-119858b6739e.png) + +Note that your subdomain needs to be unique across all of Netlify, so `blog.netlify.app` is probably already taken! You can also connect a completely custom domain: click the **Add custom domain** button. + +#### Branch Deploys + +Another neat feature of Netlify is _Branch Deploys_. When you create a branch and push it up to your repo, Netlify will build that branch at a unique URL so that you can test your changes, leaving the main site alone. Once your branch is merged to `main` then a deploy at your main site will run and your changes will show to the world. To enable Branch Deploys go to **Site settings** > **Build & deploy** > **Continuous Deployment** and under the **Branches** section click **Edit settings** and change **Branch deploys** to "All". You can also enable _Deploy previews_ which will create them for any pull requests against your repo. + +![Netlify settings screenshot](https://user-images.githubusercontent.com/7134153/182321177-2d845d77-36f4-4146-9fb9-55ae83a30983.png) + +:::tip + +You also have the ability to "lock" the `main` branch so that deploys do not automatically occur on every push—you need to manually tell Netlify to deploy the latest, either by going to the site or using the [Netlify CLI](https://cli.netlify.com/). + +::: + +### Database Concerns + +#### Connections + +In this tutorial, your serverless functions will be connecting directly to the Postgres database. Because Postgres has a limited number of concurrent connections it will accept, this does not scale—imagine a flood of traffic to your site which causes a 100x increase in the number of serverless function calls. Netlify (and behind the scenes, AWS) will happily spin up 100+ serverless Lambda instances to handle the traffic. The problem is that each one will open its own connection to your database, potentially exhausting the number of available connections. The proper solution is to put a connection pooling service in front of Postgres and connect to that from your lambda functions. To learn how to do that, see the [Connection Pooling](../../connection-pooling.md) guide. + +#### Security + +Your database will need to be open to the world because you never know what IP address a serverless function will have when it runs. 
You could potentially get the CIDR block for ALL IP addresses that your hosting provider has and only allow connections from that list, but those ranges usually change over time and keeping them in sync is not trivial. As long as you keep your DB username/password secure you should be safe, but we understand this is not the ideal solution. + +As this form of full-stack Jamstack gains more prominence we're counting on database providers to provide more robust, secure solutions that address these issues. Our team is working closely with several of them and will hopefully have good news to share in the near future! + +##### The Signup Problem + +Speaking of security, you may have noticed a glaring security hole in our build: anyone can come along and sign up for a new account and start creating blog posts! That's not ideal. A quick and easy solution would be to remove the `signup` route after you've created your own account: now there's no signup page accessible and a normal human will give up. But what about devious hackers? + +dbAuth provides an API for signup and login that the client knows how to call, but if someone were crafty enough they could make their own API calls to that same endpoint and still create a new user even without the signup page! Ahhhh! We finally made it through this long (but fun!) tutorial, can't we just take a break and put our feet up? Unfortunately, the war against bad actors never really ends. + +To close this hole, check out `api/src/functions/auth.js`, this is where the configuration for dbAuth lives. Take a gander at the `signupOptions` object, specifically the `handler()` function. This defines what to do with the user data that's submitted on the signup form. If you simply have this function return `false`, instead of creating a user, we will have effectively shut the door on the API signup hack. + +Commit your changes and push your repo, and Netlify will re-deploy your site. Take that you hacking [snollygosters](https://www.merriam-webster.com/dictionary/snollygoster)! + +![100% accurate portrayal of hacking](https://user-images.githubusercontent.com/300/152592915-609747f9-3d68-4d72-8cd8-e120ef83b640.gif) diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter5/first-story.md b/docs/versioned_docs/version-7.0/tutorial/chapter5/first-story.md new file mode 100644 index 000000000000..8b111bfa990d --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter5/first-story.md @@ -0,0 +1,247 @@ +# Our First Story + +Let's say that on our homepage we only want to show the first couple of sentences in our blog post as a short summary, and then you'll have to click through to see the full post. + +First let's update the `Article` component to contain that functionality: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +import { Link, routes } from '@redwoodjs/router' + +// highlight-start +const truncate = (text, length) => { + return text.substring(0, length) + '...' +} +// highlight-end + +// highlight-next-line +const Article = ({ article, summary = false }) => { + return ( + <article className="mt-10"> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + // highlight-next-line + {summary ? 
truncate(article.body, 100) : article.body} + </div> + </article> + ) +} + +export default Article +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Article/Article.tsx" +import { Link, routes } from '@redwoodjs/router' + +import type { Post } from 'types/graphql' + +// highlight-start +const truncate = (text: string, length: number) => { + return text.substring(0, length) + '...' +} +// highlight-end + +interface Props { + // highlight-start + article: Omit<Post, 'createdAt'> + summary?: boolean + // highlight-end +} + +// highlight-next-line +const Article = ({ article, summary = false }: Props) => { + return ( + <article className="mt-10"> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + // highlight-next-line + {summary ? truncate(article.body, 100) : article.body} + </div> + </article> + ) +} + +export default Article +``` + +</TabItem> +</Tabs> + + +We'll pass an additional `summary` prop to the component to let it know if it should show just the summary or the whole thing. We default it to `false` to preserve the existing behavior—always showing the full body. + +Now in the Storybook story let's create a `summary` story that uses the `Article` component the same way that `generated` does, but adds the new `summary` prop. We'll take the content of the sample post and put that in a constant that both stories will use. We'll also rename `generated` to `full` to make it clear what's different between the two: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/components/Article/Article.stories.jsx" +import Article from './Article' + +// highlight-start +const ARTICLE = { + id: 1, + title: 'First Post', + body: `Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Street art next level umami squid. Hammock hexagon glossier 8-bit banjo. Neutra la croix mixtape echo park four loko semiotics kitsch forage chambray. Semiotics salvia selfies jianbing hella shaman. Letterpress helvetica vaporware cronut, shaman butcher YOLO poke fixie hoodie gentrify woke heirloom.`, +} +// highlight-end + +// highlight-start +export const full = () => { + return <Article article={ARTICLE} /> +} +// highlight-end + +// highlight-start +export const summary = () => { + return <Article article={ARTICLE} summary={true} /> +} +// highlight-end + +export default { title: 'Components/Article' } +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/components/Article/Article.stories.tsx" +import Article from './Article' + +// highlight-start +const ARTICLE = { + id: 1, + title: 'First Post', + body: `Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Street art next level umami squid. Hammock hexagon glossier 8-bit banjo. Neutra la croix mixtape echo park four loko semiotics kitsch forage chambray. Semiotics salvia selfies jianbing hella shaman. 
Letterpress helvetica vaporware cronut, shaman butcher YOLO poke fixie hoodie gentrify woke heirloom.`, +} +// highlight-end + +// highlight-start +export const full = () => { + return <Article article={ARTICLE} /> +} +// highlight-end + +// highlight-start +export const summary = () => { + return <Article article={ARTICLE} summary={true} /> +} +// highlight-end + +export default { title: 'Components/Article' } +``` + +</TabItem> +</Tabs> + +As soon as you save the change the stories Storybook should refresh and may show an error: there's no longer a "Generated" story to show! In the tree on the left, expand "Article" and the "Full" version should show right away. Click on "Summary" to see the difference: + +![image](https://user-images.githubusercontent.com/300/153311838-595b8b38-d899-4d7b-891b-a492f0c8f2e2.png) + +### Displaying the Summary + +Great! Now to complete the picture let's use the summary in our home page display of blog posts. The actual Home page isn't what references the `Article` component though, that's in the `ArticlesCell`. We'll add the `summary` prop and then check the result in Storybook: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.jsx" +import Article from 'src/components/Article' + +export const QUERY = gql` + query ArticlesQuery { + articles: posts { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => <div>Error: {error.message}</div> + +export const Success = ({ articles }) => { + return ( + <div className="space-y-10"> + {articles.map((article) => ( + // highlight-next-line + <Article article={article} key={article.id} summary={true} /> + ))} + </div> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/ArticlesCell/ArticlesCell.tsx" +import Article from 'src/components/Article' + +import type { ArticlesQuery } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query ArticlesQuery { + articles: posts { + id + title + body + createdAt + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div>Error: {error.message}</div> +) + +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { + return ( + <div className="space-y-10"> + {articles.map((article) => ( + // highlight-next-line + <Article article={article} key={article.id} summary={true} /> + ))} + </div> + ) +} +``` + +</TabItem> +</Tabs> + +Check out the story to see the new summary view: + +![image](https://user-images.githubusercontent.com/300/153312022-1cfbf696-b2cb-4fca-b640-4111643fb396.png) + +And if you head to the real site you'll see the summary there as well: + +![image](https://user-images.githubusercontent.com/300/101545160-b2d45880-395b-11eb-9a32-f8cb8106de7f.png) + +We can double check that our original usage of `Article` (the one without the `summary` prop) in `ArticleCell` still renders the entire post, not just the truncated version: + +![image](https://user-images.githubusercontent.com/300/153312180-2a80df75-ea95-4e7b-9eb5-45fa900333e9.png) + +Storybook makes it easy to create and modify your components in isolation and actually helps enforce a general best practice when building React applications: components should 
be self-contained and reusable by just changing the props that are sent in. diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter5/first-test.md b/docs/versioned_docs/version-7.0/tutorial/chapter5/first-test.md new file mode 100644 index 000000000000..f5d13ed06fe4 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter5/first-test.md @@ -0,0 +1,500 @@ +# Our First Test + +So if Storybook is the first phase of creating/updating a component, phase two must be confirming the functionality with a test. Let's add a test for our new summary feature. + +If you've never done any kind of testing before this may be a little hard to follow. We've got a great document [all about testing](../../testing.md) (including some philosophy, for those so inclined) if you want a good overview of testing in general. We even build a super-simple test runner from scratch in plain JavaScript to take some of the mystery out of how this all works! + +If you still have the test process running from the previous page then then you can just press `a` to run **a**ll tests. If you stopped your test process, you can start it again with: + +```bash +yarn rw test +``` + +Can you guess what broke in this test? + +![image](https://user-images.githubusercontent.com/300/153312402-dd7f08bc-e23d-4acc-8202-cdfc9798a911.png) + +The test was looking for the full text of the blog post, but remember that in `ArticlesCell` we had `Article` only display the *summary* of the post. This test is looking for the full text match, which is no longer present on the page. + +Let's update the test so that it checks for the expected behavior instead. There are entire books written on the best way to test, so no matter what we decide on testing in this code there will be someone out there to tell us we're doing it wrong. As just one example, the simplest test would be to just copy what's output and use that for the text in the test: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell.test.jsx" +test('Success renders successfully', async () => { + const articles = standard().articles + render(<Success articles={articles} />) + + // highlight-start + expect(screen.getByText(articles[0].title)).toBeInTheDocument() + expect( + screen.getByText( + 'Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Str...' + ) + ).toBeInTheDocument() + // highlight-end +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/ArticlesCell.test.tsx" +test('Success renders successfully', async () => { + const articles = standard().articles + render(<Success articles={articles} />) + + // highlight-start + expect(screen.getByText(articles[0].title)).toBeInTheDocument() + expect( + screen.getByText( + 'Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Str...' + ) + ).toBeInTheDocument() + // highlight-end +}) +``` + +</TabItem> +</Tabs> + +But the truncation length could change later, so how do we encapsulate that in our test? Or should we? The number of characters to truncate to is hardcoded in the `Article` component, which this component shouldn't really care about: it should be up to the page that's presenting the article to determine much or how little to show (based on space concerns, design constraints, etc.) don't you think? 
Even if we refactored the `truncate()` function into a shared place and imported it into both `Article` and this test, the test will still be knowing too much about `Article`—why should it have detailed knowledge of the internals of `Article` and that it's making use of this `truncate()` function at all? It shouldn't! One theory of testing says that the thing you're testing should be a black box: you can't see inside of it, all you can test is what data comes out when you send certain data in. + +Let's compromise—by virtue of the fact that this functionality has a prop called "summary" we can guess that it's doing *something* to shorten the text. So what if we test three things that we can make reasonable assumptions about right now: + +1. The full body of the post body *is not* present +2. But, at least the first couple of words of the post *are* present +3. The text that is shown ends in "..." + +This gives us a buffer if we decide to truncate to something like 25 words, or even if we go up to a couple of hundred. What it *doesn't* encompass, however, is the case where the body of the blog post is shorter than the truncate limit. In that case the full text *would* be present, and we should probably update the `truncate()` function to not add the `...` in that case. We'll leave adding that functionality and test case up to you to add in your free time. ;) + +### Adding the Test + +Okay, let's do this: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell.test.jsx" +// highlight-next-line +import { render, screen, within } from '@redwoodjs/testing' + +import { Loading, Empty, Failure, Success } from './ArticlesCell' +import { standard } from './ArticlesCell.mock' + +describe('ArticlesCell', () => { + test('Loading renders successfully', () => { + expect(() => { + render(<Loading />) + }).not.toThrow() + }) + + test('Empty renders successfully', async () => { + expect(() => { + render(<Empty />) + }).not.toThrow() + }) + + test('Failure renders successfully', async () => { + expect(() => { + render(<Failure error={new Error('Oh no')} />) + }).not.toThrow() + }) + + test('Success renders successfully', async () => { + const articles = standard().articles + render(<Success articles={articles} />) + + // highlight-start + articles.forEach((article) => { + const truncatedBody = article.body.substring(0, 10) + const matchedBody = screen.getByText(truncatedBody, { exact: false }) + const ellipsis = within(matchedBody).getByText('...', { exact: false }) + + expect(screen.getByText(article.title)).toBeInTheDocument() + expect(screen.queryByText(article.body)).not.toBeInTheDocument() + expect(matchedBody).toBeInTheDocument() + expect(ellipsis).toBeInTheDocument() + }) + // highlight-end + }) +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/ArticlesCell.test.tsx" +// highlight-next-line +import { render, screen, within } from '@redwoodjs/testing' + +import { Loading, Empty, Failure, Success } from './ArticlesCell' +import { standard } from './ArticlesCell.mock' + +describe('ArticlesCell', () => { + test('Loading renders successfully', () => { + expect(() => { + render(<Loading />) + }).not.toThrow() + }) + + test('Empty renders successfully', async () => { + expect(() => { + render(<Empty />) + }).not.toThrow() + }) + + test('Failure renders successfully', async () => { + expect(() => { + render(<Failure error={new Error('Oh no')} />) + }).not.toThrow() + }) + + test('Success renders 
successfully', async () => {
+    const articles = standard().articles
+    render(<Success articles={articles} />)
+
+    // highlight-start
+    articles.forEach((article) => {
+      const truncatedBody = article.body.substring(0, 10)
+      const matchedBody = screen.getByText(truncatedBody, { exact: false })
+      const ellipsis = within(matchedBody).getByText('...', { exact: false })
+
+      expect(screen.getByText(article.title)).toBeInTheDocument()
+      expect(screen.queryByText(article.body)).not.toBeInTheDocument()
+      expect(matchedBody).toBeInTheDocument()
+      expect(ellipsis).toBeInTheDocument()
+    })
+    // highlight-end
+  })
+})
+```
+
+</TabItem>
+</Tabs>
+
+This loops through each article in our `standard()` mock and for each one:
+
+```javascript
+const truncatedBody = article.body.substring(0, 10)
+```
+
+Create a variable `truncatedBody` containing the first 10 characters of the post body.
+
+```javascript
+const matchedBody = screen.getByText(truncatedBody, { exact: false })
+```
+
+Search through the rendered HTML on the screen and find the HTML element that contains the truncated body (note the `{ exact: false }` here, as normally the exact text, and only that text, would need to be present, but in this case there's probably more than just the 10 characters).
+
+```javascript
+const ellipsis = within(matchedBody).getByText('...', { exact: false })
+```
+
+Within the HTML element that was found in the previous line, find `...`, again without an exact match.
+
+```javascript
+expect(screen.getByText(article.title)).toBeInTheDocument()
+```
+
+Find the title of the article in the page.
+
+```javascript
+expect(screen.queryByText(article.body)).not.toBeInTheDocument()
+```
+
+When trying to find the *full* text of the body, it should *not* be present.
+
+```javascript
+expect(matchedBody).toBeInTheDocument()
+```
+
+Assert that the truncated text is present in the document.
+
+```javascript
+expect(ellipsis).toBeInTheDocument()
+```
+
+Assert that the ellipsis is present.
+
+:::info What's the difference between `getByText()` and `queryByText()`?
+
+`getByText()` will throw an error if the text isn't found in the document, whereas `queryByText()` will return `null` and let you continue with your testing (and is one way to test that some text is *not* present on the page). You can read more about these in the [DOM Testing Library Queries](https://testing-library.com/docs/dom-testing-library/api-queries) docs.
+
+:::
+
+As soon as you saved that test file the test should have run and passed! Press `a` to run the whole suite if you want to make sure nothing else broke. Remember to press `o` to go back to only testing changes again. (There's nothing wrong with running the full test suite each time, but it will take longer than only testing the things that have changed since the last time you committed your code.)
+
+To double check that we're testing what we think we're testing, open up `ArticlesCell.jsx` and remove the `summary={true}` prop (or set it to `false`) and the test should fail: now the full body of the post *is* on the page and the expectation in our test `expect(screen.queryByText(article.body)).not.toBeInTheDocument()` fails because the full body *is* in the document! Make sure to put the `summary={true}` back before we continue.
+
+### What's the Deal with Mocks?
+
+Did you wonder where the articles were coming from in our test? Was it the development database? Nope: that data came from a **Mock**. That's the `ArticlesCell.mock.js` file that lives next to your component, test and stories files.
Mocks are used when you want to define the data that would normally be returned by GraphQL in your Storybook stories or tests. In cells, a GraphQL call goes out (the query defined by the variable `QUERY` at the top of the file) and returned to the `Success` component. We don't want to have to run the api-side server and have real data in the database just for Storybook or our tests, so Redwood intercepts those GraphQL calls and returns the data from the mock instead. + +:::info If the server is being mocked, how do we test the api-side code? + +We'll get to that next when we create a new feature for our blog from scratch! + +::: + +The names you give your mocks are then available in your tests and stories files. Just import the one you want to use (`standard` is imported for you in generated test files) and you can use the spread syntax to pass it through to your **Success** component. + +Let's say our mock looks like this: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript +export const standard = () => ({ + articles: [ + { + id: 1, + title: 'First Post', + body: `Neutra tacos hot chicken prism raw denim...`, + createdAt: '2020-01-01T12:34:56Z', + }, + { + id: 2, + title: 'Second Post', + body: `Master cleanse gentrify irony put a bird on it...`, + createdAt: '2020-01-01T12:34:56Z', + }, + ], +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript +export const standard = () => ({ + articles: [ + { + id: 1, + title: 'First Post', + body: `Neutra tacos hot chicken prism raw denim...`, + createdAt: '2020-01-01T12:34:56Z', + }, + { + id: 2, + title: 'Second Post', + body: `Master cleanse gentrify irony put a bird on it...`, + createdAt: '2020-01-01T12:34:56Z', + }, + ], +}) +``` + +</TabItem> +</Tabs> + +The first key in the object that's returned is named `articles`. That's also the name of the prop that's expected to be sent into **Success** in the cell: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx +// highlight-next-line +export const Success = ({ articles }) => { + return ( + { articles.map((article) => <Article article={article} />) } + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx +// highlight-next-line +export const Success = ({ articles }: CellSuccessProps<ArticlesQuery>) => { + return ( + { articles.map((article) => <Article article={article} />) } + ) +} +``` + +</TabItem> +</Tabs> + +So we can just spread the result of `standard()` in a story or test when using the **Success** component and everything works out: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/ArticlesCell/ArticlesCell.stories.jsx" +import { Success } from './ArticlesCell' +import { standard } from './ArticlesCell.mock' + +export const success = () => { + // highlight-next-line + return Success ? <Success {...standard()} /> : null +} + +export default { title: 'Cells/ArticlesCell' } +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/ArticlesCell/ArticlesCell.stories.tsx" +import { Success } from './ArticlesCell' +import { standard } from './ArticlesCell.mock' + +export const success = () => { + // highlight-next-line + return Success ? 
<Success {...standard()} /> : null
+}
+
+export default { title: 'Cells/ArticlesCell' }
+```
+
+</TabItem>
+</Tabs>
+
+Some folks find this syntax a little *too* succinct and would rather see the `<Success>` component being invoked the same way it is in their actual code. If that sounds like you, skip the spread syntax and just call the `articles` property on `standard()` the old fashioned way:
+
+<Tabs groupId="js-ts">
+<TabItem value="js" label="JavaScript">
+
+```jsx title="web/src/components/ArticlesCell/ArticlesCell.stories.jsx"
+import { Success } from './ArticlesCell'
+import { standard } from './ArticlesCell.mock'
+
+export const success = () => {
+  // highlight-next-line
+  return Success ? <Success articles={standard().articles} /> : null
+}
+
+export default { title: 'Cells/ArticlesCell' }
+```
+
+</TabItem>
+<TabItem value="ts" label="TypeScript">
+
+```tsx title="web/src/components/ArticlesCell/ArticlesCell.stories.tsx"
+import { Success } from './ArticlesCell'
+import { standard } from './ArticlesCell.mock'
+
+export const success = () => {
+  // highlight-next-line
+  return Success ? <Success articles={standard().articles} /> : null
+}
+
+export default { title: 'Cells/ArticlesCell' }
+```
+
+</TabItem>
+</Tabs>
+
+You can have as many mocks as you want, just import the names of the ones you need and send them in as props to your components.
+
+### Testing Article
+
+Our test suite is passing again but it's a trick! We never added a test for the actual `summary` functionality that we added to the `Article` component. We tested that the articles rendered by `ArticlesCell` (each of which ends up rendering an `Article`) include a summary, but what it means to render a summary is knowledge that only `Article` contains.
+
+When you get into the flow of building your app it can be very easy to overlook testing functionality like this. Wasn't it Winston Churchill who said "a thorough test suite requires eternal vigilance"? Techniques like [Test Driven Development](https://en.wikipedia.org/wiki/Test-driven_development) (TDD) were established to help combat this tendency: when you want to write a new feature, write the test first, watch it fail, then write the code to make the test pass so that you know every line of real code you write is backed by a test. What we're doing is affectionately known as [Development Driven Testing](https://medium.com/table-xi/development-driven-testing-673d3959dac2). You'll probably settle somewhere in the middle but one maxim is always true: some tests are better than no tests.
+
+The summary functionality in `Article` is pretty simple, but there are a couple of different ways we could test it:
+
+* Export the `truncate()` function and test it directly
+* Test the final rendered state of the component
+
+In this case `truncate()` "belongs to" `Article` and the outside world really shouldn't need to worry about it or know that it exists. If we came to a point in development where another component needed to truncate text then that would be a perfect time to move this function to a shared location and import it into both components that need it. `truncate()` could then have its own dedicated test. But for now let's keep our separation of concerns and test the one thing that's "public" about this component—the result of the render.
+
+In this case let's just test that the output matches an exact string.
Since the knowledge of how long to make the summary is contained in `Article` itself, at this point it feels okay to have the test tightly coupled to the render result of this particular component. (`ArticlesCell` itself didn't know about how long to truncate, just that *something* was shortening the text.) You could spin yourself in circles trying to refactor the code to make it absolutely bulletproof to code changes breaking the tests, but will you ever actually need that level of flexibility? It's always a trade-off! + +We'll move the sample article data in the test to a constant and then use it in both the existing test (which tests that not passing the `summary` prop at all results in the full body being rendered) and our new test that checks for the summary version being rendered: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.test.jsx" +import { render, screen } from '@redwoodjs/testing' + +import Article from './Article' + +// highlight-start +const ARTICLE = { + id: 1, + title: 'First post', + body: `Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Street art next level umami squid. Hammock hexagon glossier 8-bit banjo. Neutra la croix mixtape echo park four loko semiotics kitsch forage chambray. Semiotics salvia selfies jianbing hella shaman. Letterpress helvetica vaporware cronut, shaman butcher YOLO poke fixie hoodie gentrify woke heirloom.`, + createdAt: new Date().toISOString(), +} +// highlight-end + +describe('Article', () => { + it('renders a blog post', () => { + // highlight-next-line + render(<Article article={ARTICLE} />) + + // highlight-start + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect(screen.getByText(ARTICLE.body)).toBeInTheDocument() + // highlight-end + }) + + // highlight-start + it('renders a summary of a blog post', () => { + render(<Article article={ARTICLE} summary={true} />) + + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect( + screen.getByText( + 'Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Str...' + ) + ).toBeInTheDocument() + }) + // highlight-end +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/Article/Article.test.tsx" +import { render, screen } from '@redwoodjs/testing' + +import Article from './Article' + +// highlight-start +const ARTICLE = { + id: 1, + title: 'First post', + body: `Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Street art next level umami squid. Hammock hexagon glossier 8-bit banjo. Neutra la croix mixtape echo park four loko semiotics kitsch forage chambray. Semiotics salvia selfies jianbing hella shaman. 
Letterpress helvetica vaporware cronut, shaman butcher YOLO poke fixie hoodie gentrify woke heirloom.`, + createdAt: new Date().toISOString(), +} +// highlight-end + +describe('Article', () => { + it('renders a blog post', () => { + // highlight-next-line + render(<Article article={ARTICLE} />) + + // highlight-start + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect(screen.getByText(ARTICLE.body)).toBeInTheDocument() + // highlight-end + }) + + // highlight-start + it('renders a summary of a blog post', () => { + render(<Article article={ARTICLE} summary={true} />) + + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect( + screen.getByText( + 'Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Str...' + ) + ).toBeInTheDocument() + }) + // highlight-end +}) +``` + +</TabItem> +</Tabs> + +Saving that change should run the tests and we'll see that our suite is still happy! + +### One Last Thing + +Remember we set the `summary` prop to default to `false` if it doesn't exist, which is tested by the first test case (passing no `summary` prop at all). However, we don't have a test that checks what happens if `false` is set explicitly. Feel free to add that now if you want [100% Code Coverage](https://www.functionize.com/blog/the-myth-of-100-code-coverage)! diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter5/storybook.md b/docs/versioned_docs/version-7.0/tutorial/chapter5/storybook.md new file mode 100644 index 000000000000..b07d92ae1210 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter5/storybook.md @@ -0,0 +1,139 @@ +# Introduction to Storybook + +Let's see what this Storybook thing is all about. Run this command to start up the Storybook server (you could stop your dev or test runners and then run this, or start another new terminal instance): + +```bash +yarn rw storybook +``` + +After some compiling you should get a message saying that Storybook has started and it's available at [http://localhost:7910](http://localhost:7910) + +![image](https://user-images.githubusercontent.com/300/153311732-21a62ee8-5bdf-45b7-b163-35a5ec0ce318.png) + +If you poke around at the file tree on the left you'll see all of the components, cells, layouts and pages we created during the tutorial. Where did they come from? You may recall that every time we generated a new page/cell/component we actually created at least *three* files: + +* `Article.{jsx,tsx}` +* `Article.stories.{jsx,tsx}` +* `Article.test.{jsx,tsx}` + +:::info + +If you generated a cell then you also got a `.mock.{js,ts}` file (more on those later). + +::: + +Those `.stories.{jsx,tsx}` files are what makes the tree on the left side of the Storybook browser possible! From their [homepage](https://storybook.js.org/), Storybook describes itself as: + +*"...an open source tool for developing UI components in isolation for React, Vue, Angular, and more. It makes building stunning UIs organized and efficient."* + +So, the idea here is that you can build out your components/cells/pages in isolation, get them looking the way you want and displaying the correct data, then plug them into your full application. + +When Storybook opened it should have opened **Components > Article > Generated** which is the generated component we created to display a single blog post. 
If you open `web/src/components/Article/Article.stories.{jsx,tsx}` you'll see what it takes to explain this component to Storybook, and it isn't much: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.stories.jsx" +import Article from './Article' + +export const generated = () => { + return ( + <Article + article={{ + id: 1, + title: 'First Post', + body: `Neutra tacos hot chicken prism raw denim, put + a bird on it enamel pin post-ironic vape cred + DIY. Street art next level umami squid. + Hammock hexagon glossier 8-bit banjo. Neutra + la croix mixtape echo park four loko semiotics + kitsch forage chambray. Semiotics salvia + selfies jianbing hella shaman. Letterpress + helvetica vaporware cronut, shaman butcher + YOLO poke fixie hoodie gentrify woke + heirloom.`, + createdAt: '2020-01-01T12:34:45Z' + }} + /> + ) +} + +export default { title: 'Components/Article' } +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Article/Article.stories.tsx" +import Article from './Article' + +export const generated = () => { + return ( + <Article + article={{ + id: 1, + title: 'First Post', + body: `Neutra tacos hot chicken prism raw denim, put + a bird on it enamel pin post-ironic vape cred + DIY. Street art next level umami squid. + Hammock hexagon glossier 8-bit banjo. Neutra + la croix mixtape echo park four loko semiotics + kitsch forage chambray. Semiotics salvia + selfies jianbing hella shaman. Letterpress + helvetica vaporware cronut, shaman butcher + YOLO poke fixie hoodie gentrify woke + heirloom.`, + createdAt: '2020-01-01T12:34:45Z' + }} + /> + ) +} + +export default { title: 'Components/Article' } +``` + +</TabItem> +</Tabs> + +You import the component you want to use and then all of the named exports in the file will be a single "story" as displayed in Storybook. In this case the generator named it "generated" which shows as the "Generated" story in the tree view: + +``` +Components +└── Article + └── Generated +``` + +This makes it easy to create variants of your component and have them all displayed together. + +:::info Where did that sample blog post data come from? + +In your actual app you'd use this component like so: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx +<Article article={article} /> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx +<Article article={article} /> +``` + +</TabItem> +</Tabs> + +Where the `article` in that prop comes from somewhere outside of this component. Here in Storybook there is no "outside" of this component, so we just send the article object into the prop directly. + +**But where did the pre-filled article data come from?** + +We (the Redwood team) added that to the story in the `redwood-tutorial` repo to show you what a story might look like after you hook up some sample data. Several of the stories need data like this, some inline and some in those `.mock.{js,ts}` files. The rest of the tutorial will be showing you how to do this yourself with new components as you create them. + +**Where did the *actual* text in the body come from?** + +[Hipster Ipsum](https://hipsum.co/), a fun alternative to Lorem Ipsum filler text! 
+
+:::
diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter5/testing.md b/docs/versioned_docs/version-7.0/tutorial/chapter5/testing.md
new file mode 100644
index 000000000000..17b08c048898
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/tutorial/chapter5/testing.md
@@ -0,0 +1,56 @@
+# Introduction to Testing
+
+Let's run the test suite to make sure everything is working as expected (you can keep the dev server running and start this in a new terminal window):
+
+```bash
+yarn rw test
+```
+
+The `test` command starts a persistent process which watches for file changes and automatically runs any tests associated with the changed file(s) (changing a component *or* its tests will trigger a test run).
+
+Since we just started the suite, and we haven't changed any files yet, it may not actually run any tests at all. Hit `a` to tell it to run **a**ll tests and we should get something like this:
+
+![tests_running](https://user-images.githubusercontent.com/46945607/165376937-89ed9254-0d8e-4945-a0d9-17178764a4b0.png)
+
+If you cloned the example repo during the intermission and followed along with the Storybook tutorial in this chapter, the test run should finish and you will see something like this:
+
+![suite_finished](https://user-images.githubusercontent.com/46945607/165378519-2859dd0d-d46a-448f-a62e-0b8f91c55a87.png)
+
+:::info
+
+If you decided to keep your codebase from the first part of the tutorial, then you'll get the following error after running:
+
+```bash
+yarn rw test
+
+Error: Get config: Schema Parsing P1012
+
+error: Error validating datasource `db`: the URL must start with the protocol `postgresql://` or `postgres://`.
+  -->  schema.prisma:3
+   |
+ 2 | provider = "postgresql"
+ 3 | url      = env("DATABASE_URL")
+   |
+
+Validation Error Count: 1
+
+error Command failed with exit code 1.
+```
+
+To clear the error and to proceed with running the test suite, head over to your `.env` file and add the following line:
+
+```bash
+TEST_DATABASE_URL=<the same url as DATABASE_URL>
+```
+
+:::
+
+Note that the summary on the bottom indicates that there was 1 test that failed. If you feel curious, you can scroll up in your terminal and see more details on the test that failed. We'll also take a look at that failed test shortly.
+
+If you continued with your own repo from chapters 1-4, you may see some other failures here or none at all: we made a lot of changes to the pages, components and cells we generated, but didn't update the tests to reflect the changes we made. (Another reason to start with the [example repo](../intermission.md#using-the-example-repo-recommended)!)
+
+To switch back to the default mode where tests are **o**nly run for changed files, press `o` now (or quit and restart `yarn rw test`).
+
+What we want to aim for is all green in that left column and no failed tests. In fact best practices tell us you should not even commit any code to your repo unless the test suite passes locally. Not everyone adheres to this policy quite as strictly as others...*<cough, cough>*
+
+We've got an excellent document on [Testing](../../testing.md) which you should definitely read if you're brand new to testing, especially the [Terminology](../../testing.md#terminology) and [Redwood and Testing](../../testing.md#redwood-and-testing) sections. For now though, proceed to the next section and we'll go over our approach to getting that last failed test passing.
diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter6/comment-form.md b/docs/versioned_docs/version-7.0/tutorial/chapter6/comment-form.md new file mode 100644 index 000000000000..ddcef47b56ea --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter6/comment-form.md @@ -0,0 +1,1715 @@ +# Creating a Comment Form + +Let's generate a component to house our new comment form, build it out and integrate it via Storybook, then add some tests: + +```bash +yarn rw g component CommentForm +``` + +And startup Storybook again if it isn't still running: + +```bash +yarn rw storybook +``` + +You'll see that there's a **CommentForm** entry in Storybook now, ready for us to get started. + +![image](https://user-images.githubusercontent.com/300/153927943-648c62d2-b0c3-40f2-9bad-3aa81170d7c2.png) + +### Storybook + +Let's build a simple form to take the user's name and their comment and add some styling to match it to the blog: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.jsx" +import { + Form, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const CommentForm = () => { + return ( + <div> + <h3 className="font-light text-lg text-gray-600">Leave a Comment</h3> + <Form className="mt-4 w-full"> + <Label name="name" className="block text-sm text-gray-600 uppercase"> + Name + </Label> + <TextField + name="name" + className="block w-full p-1 border rounded text-xs " + validation={{ required: true }} + /> + + <Label + name="body" + className="block mt-4 text-sm text-gray-600 uppercase" + > + Comment + </Label> + <TextAreaField + name="body" + className="block w-full p-1 border rounded h-24 text-xs" + validation={{ required: true }} + /> + + <Submit + className="block mt-4 bg-blue-500 text-white text-xs font-semibold uppercase tracking-wide rounded px-3 py-2 disabled:opacity-50" + > + Submit + </Submit> + </Form> + </div> + ) +} + +export default CommentForm +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/CommentForm/CommentForm.tsx" +import { + Form, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' + +const CommentForm = () => { + return ( + <div> + <h3 className="font-light text-lg text-gray-600">Leave a Comment</h3> + <Form className="mt-4 w-full"> + <Label name="name" className="block text-sm text-gray-600 uppercase"> + Name + </Label> + <TextField + name="name" + className="block w-full p-1 border rounded text-xs" + validation={{ required: true }} + /> + + <Label + name="body" + className="block mt-4 text-sm text-gray-600 uppercase" + > + Comment + </Label> + <TextAreaField + name="body" + className="block w-full p-1 border rounded h-24 text-xs" + validation={{ required: true }} + /> + + <Submit + className="block mt-4 bg-blue-500 text-white text-xs font-semibold uppercase tracking-wide rounded px-3 py-2 disabled:opacity-50" + > + Submit + </Submit> + </Form> + </div> + ) +} + +export default CommentForm +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/153928306-5e0979c6-2049-4039-87a2-284a4010283a.png) + +Note that the form and its inputs are set to 100% width. Again, the form shouldn't be dictating anything about its layout that its parent should be responsible for, like how wide the inputs are. Those should be determined by whatever contains it so that it looks good with the rest of the content on the page. 
So the form will be 100% wide and the parent (whoever that ends up being) will decide how wide it really is on the page. + +You can even try submitting the form right in Storybook! If you leave "name" or "comment" blank then they should get focus when you try to submit, indicating that they are required. If you fill them both in and click **Submit** nothing happens because we haven't hooked up the submit yet. Let's do that now. + +### Submitting + +Submitting the form should use the `createComment` function we added to our services and GraphQL. We'll need to add a mutation to the form component and an `onSubmit` handler to the form so that the create can be called with the data in the form. And since `createComment` could return an error we'll add the **FormError** component to display it: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.jsx" +import { + Form, + // highlight-next-line + FormError, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' +// highlight-next-line +import { useMutation } from '@redwoodjs/web' + +// highlight-start +const CREATE = gql` + mutation CreateCommentMutation($input: CreateCommentInput!) { + createComment(input: $input) { + id + name + body + createdAt + } + } +` +// highlight-end + +const CommentForm = () => { + // highlight-next-line + const [createComment, { loading, error }] = useMutation(CREATE) + + // highlight-start + const onSubmit = (input) => { + createComment({ variables: { input } }) + } + // highlight-end + + return ( + <div> + <h3 className="font-light text-lg text-gray-600">Leave a Comment</h3> + // highlight-start + <Form className="mt-4 w-full" onSubmit={onSubmit}> + <FormError + error={error} + titleClassName="font-semibold" + wrapperClassName="bg-red-100 text-red-900 text-sm p-3 rounded" + /> + // highlight-end + <Label + name="name" + className="block text-xs font-semibold text-gray-500 uppercase" + > + Name + </Label> + <TextField + name="name" + className="block w-full p-1 border rounded text-sm " + validation={{ required: true }} + /> + + <Label + name="body" + className="block mt-4 text-xs font-semibold text-gray-500 uppercase" + > + Comment + </Label> + <TextAreaField + name="body" + className="block w-full p-1 border rounded h-24 text-sm" + validation={{ required: true }} + /> + + <Submit + // highlight-next-line + disabled={loading} + className="block mt-4 bg-blue-500 text-white text-xs font-semibold uppercase tracking-wide rounded px-3 py-2 disabled:opacity-50" + > + Submit + </Submit> + </Form> + </div> + ) +} + +export default CommentForm +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.tsx" +import { + Form, + // highlight-next-line + FormError, + Label, + TextField, + TextAreaField, + Submit, + // highlight-next-line + SubmitHandler, +} from '@redwoodjs/forms' +// highlight-next-line +import { useMutation } from '@redwoodjs/web' + +// highlight-start +const CREATE = gql` + mutation CreateCommentMutation($input: CreateCommentInput!) 
{ + createComment(input: $input) { + id + name + body + createdAt + } + } +` +// highlight-end + +// highlight-start +interface FormValues { + name: string + comment: string +} +// highlight-end + +const CommentForm = () => { + // highlight-next-line + const [createComment, { loading, error }] = useMutation(CREATE) + + // highlight-start + const onSubmit: SubmitHandler<FormValues> = (input) => { + createComment({ variables: { input } }) + } + // highlight-end + + return ( + <div> + <h3 className="font-light text-lg text-gray-600">Leave a Comment</h3> + // highlight-start + <Form className="mt-4 w-full" onSubmit={onSubmit}> + <FormError + error={error} + titleClassName="font-semibold" + wrapperClassName="bg-red-100 text-red-900 text-sm p-3 rounded" + /> + // highlight-end + <Label + name="name" + className="block text-xs font-semibold text-gray-500 uppercase" + > + Name + </Label> + <TextField + name="name" + className="block w-full p-1 border rounded text-sm " + validation={{ required: true }} + /> + + <Label + name="body" + className="block mt-4 text-xs font-semibold text-gray-500 uppercase" + > + Comment + </Label> + <TextAreaField + name="body" + className="block w-full p-1 border rounded h-24 text-sm" + validation={{ required: true }} + /> + + <Submit + // highlight-next-line + disabled={loading} + className="block mt-4 bg-blue-500 text-white text-xs font-semibold uppercase tracking-wide rounded px-3 py-2 disabled:opacity-50" + > + Submit + </Submit> + </Form> + </div> + ) +} + +export default CommentForm +``` + +</TabItem> +</Tabs> + +If you try to submit the form you'll get an error in the web console—Storybook will automatically mock GraphQL queries, but not mutations. But, we can mock the request in the story and handle the response manually: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.stories.jsx" +import CommentForm from './CommentForm' + +export const generated = () => { + // highlight-start + mockGraphQLMutation('CreateCommentMutation', (variables, { ctx }) => { + const id = Math.floor(Math.random() * 1000) + ctx.delay(1000) + + return { + createComment: { + id, + name: variables.input.name, + body: variables.input.body, + createdAt: new Date().toISOString(), + }, + } + }) + // highlight-end + + return <CommentForm /> +} + +export default { title: 'Components/CommentForm' } +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/CommentForm/CommentForm.stories.tsx" +import CommentForm from './CommentForm' + +// highlight-start +import type { + CreateCommentMutation, + CreateCommentMutationVariables, +} from 'types/graphql' +// highlight-end + +export const generated = () => { + // highlight-start + mockGraphQLMutation<CreateCommentMutation, CreateCommentMutationVariables>( + 'CreateCommentMutation', + (variables, { ctx }) => { + const id = Math.floor(Math.random() * 1000) + ctx.delay(1000) + + return { + createComment: { + id, + name: variables.input.name, + body: variables.input.body, + createdAt: new Date().toISOString(), + }, + } + } + ) + // highlight-end + + return <CommentForm /> +} + +export default { title: 'Components/CommentForm' } +``` + +</TabItem> +</Tabs> + +:::info + +If you still get an error, try reloading the Storybook tab in the browser. + +::: + +To use `mockGraphQLMutation` you call it with the name of the mutation you want to intercept and then the function that will handle the interception and return a response. 
The arguments passed to that function give us some flexibility in how we handle the response. + +In our case we want the `variables` that were passed to the mutation (the `name` and `body`) as well as the context object (abbreviated as `ctx`) so that we can add a delay to simulate a round trip to the server. This will let us test that the **Submit** button is disabled for that one second and you can't submit a second comment while the first one is still being saved. + +Try out the form now and the error should be gone. Also the **Submit** button should become visually disabled and clicking it during that one second delay does nothing. + +### Adding the Form to the Blog Post + +Right above the display of existing comments on a blog post is probably where our form should go. So should we add it to the `Article` component along with the `CommentsCell` component? If wherever we display a list of comments we'll also include the form to add a new one, that feels like it may as well just go into the `CommentsCell` component itself. However, this presents a problem: + +If we put the `CommentForm` in the `Success` component of `CommentsCell` then what happens when there are no comments yet? The `Empty` component renders, which doesn't include the form! So it becomes impossible to add the first comment. + +We could copy the `CommentForm` to the `Empty` component as well, but as soon as you find yourself duplicating code like this it can be a hint that you need to rethink something about your design. + +Maybe `CommentsCell` should really only be responsible for retrieving and displaying comments. Having it also accept user input seems outside of its primary concern. + +So let's use `Article` as the cleaning house for where all these disparate parts are combined—the actual blog post, the form to add a new comment, and the list of comments (and a little margin between them): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +import { Link, routes } from '@redwoodjs/router' + +// highlight-next-line +import CommentForm from 'src/components/CommentForm' +import CommentsCell from 'src/components/CommentsCell' + +const truncate = (text, length) => { + return text.substring(0, length) + '...' +} + +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + {!summary && ( + // highlight-start + <div className="mt-12"> + <CommentForm /> + // highlight-end + <div className="mt-12"> + <CommentsCell /> + </div> + // highlight-next-line + </div> + )} + </article> + ) +} + +export default Article +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Article/Article.tsx" +import { Link, routes } from '@redwoodjs/router' + +// highlight-next-line +import CommentForm from 'src/components/CommentForm' +import CommentsCell from 'src/components/CommentsCell' + +import type { Post } from 'types/graphql' + +const truncate = (text: string, length: number) => { + return text.substring(0, length) + '...' 
+} + +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + {!summary && ( + // highlight-start + <div className="mt-12"> + <CommentForm /> + // highlight-end + <div className="mt-12"> + <CommentsCell /> + </div> + // highlight-next-line + </div> + )} + </article> + ) +} + +export default Article +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/153929564-59bcafd6-f3a3-437e-86d9-b92753b7fe9b.png) + +Looks great in Storybook, how about on the real site? + +![image](https://user-images.githubusercontent.com/300/153929680-a33e5332-2e02-423e-9ca5-4757ad8dbbb5.png) + +Now comes the ultimate test: creating a comment! LET'S DO IT: + +![image](https://user-images.githubusercontent.com/300/153929833-f2a3e38d-c70e-4f64-ade1-4327a7f47193.png) + +What happened here? Notice towards the end of the error message: `Field "postId" of required type "Int!" was not provided`. When we created our data schema we said that a post belongs to a comment via the `postId` field. And that field is required, so the GraphQL server is rejecting the request because we're not including that field. We're only sending `name` and `body`. Luckily we have access to the ID of the post we're commenting on thanks to the `article` object that's being passed into `Article` itself! + +:::info Why didn't the Storybook story we wrote earlier expose this problem? + +We manually mocked the GraphQL response in the story, and our mock always returns a correct response, regardless of the input! + +There's always a tradeoff when creating mock data—it greatly simplifies testing by not having to rely on the entire GraphQL stack, but that means if you want it to be as accurate as the real thing you basically need to *re-write the real thing in your mock*. In this case, leaving out the `postId` was a one-time fix so it's probably not worth going through the work of creating a story/mock/test that simulates what would happen if we left it off. + +But, if `CommentForm` ended up being a component that was re-used throughout your application, or the code itself will go through a lot of churn because other developers will constantly be making changes to it, it might be worth investing the time to make sure the interface (the props passed to it and the expected return) are exactly what you want them to be. + +::: + +First let's pass the post's ID as a prop to `CommentForm`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +import { Link, routes } from '@redwoodjs/router' +import CommentsCell from 'src/components/CommentsCell' +import CommentForm from 'src/components/CommentForm' + +const truncate = (text, length) => { + return text.substring(0, length) + '...' +} + +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? 
truncate(article.body, 100) : article.body} + </div> + {!summary && ( + <div className="mt-12"> + // highlight-next-line + <CommentForm postId={article.id} /> + <div className="mt-12"> + <CommentsCell /> + </div> + </div> + )} + </article> + ) +} + +export default Article +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/Article/Article.tsx" +import { Link, routes } from '@redwoodjs/router' +import CommentsCell from 'src/components/CommentsCell' +import CommentForm from 'src/components/CommentForm' + +const truncate = (text: string, length: number) => { + return text.substring(0, length) + '...' +} + +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + {!summary && ( + <div className="mt-12"> + // highlight-next-line + <CommentForm postId={article.id} /> + <div className="mt-12"> + <CommentsCell /> + </div> + </div> + )} + </article> + ) +} + +export default Article +``` + +</TabItem> +</Tabs> + +And then we'll append that ID to the `input` object that's being passed to `createComment` in the `CommentForm`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.jsx" +// highlight-next-line +const CommentForm = ({ postId }) => { + const [createComment, { loading, error }] = useMutation(CREATE) + + const onSubmit = (input) => { + // highlight-next-line + createComment({ variables: { input: { postId, ...input } } }) + } + + return ( + //... + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.tsx" +// highlight-start +interface Props { + postId: number +} +// highlight-end + +// highlight-next-line +const CommentForm = ({ postId }: Props) => { + const [createComment, { loading, error }] = useMutation(CREATE) + + const onSubmit: SubmitHandler<FormValues> = (input) => { + // highlight-next-line + createComment({ variables: { input: { postId, ...input } } }) + } + + return ( + //... + ) +} +``` + +</TabItem> +</Tabs> + +Now fill out the comment form and submit! And...nothing happened! Believe it or not that's actually an improvement in the situation—no more error! What if we reload the page? + +![image](https://user-images.githubusercontent.com/300/153930645-c5233fb5-ad7f-4a03-8707-3cd6164bb277.png) + +Yay! It would have been nicer if that comment appeared as soon as we submitted the comment, so maybe that's a half-yay? Also, the text boxes stayed filled with our name/messages (before we reloaded the page) which isn't ideal. But, we can fix both of those. One involves telling the GraphQL client (Apollo) that we created a new record and, if it would be so kind, to try the query again that gets the comments for this page, and we'll fix the other by just removing the form from the page completely when a new comment is submitted. 
+ +### GraphQL Query Caching + +Much has been written about the [complexities](https://medium.com/swlh/how-i-met-apollo-cache-ee804e6485e9) of [Apollo](https://medium.com/@galen.corey/understanding-apollo-fetch-policies-705b5ad71980) [caching](https://levelup.gitconnected.com/basics-of-caching-data-in-graphql-7ce9489dac15), but for the sake of brevity (and sanity) we're going to do the easiest thing that works, and that's tell Apollo to just re-run the query that shows comments in the cell, known as "refetching." + +Along with the variables you pass to a mutation function (`createComment` in our case) there's an option named `refetchQueries` where you pass an array of queries that should be re-run because, presumably, the data you just mutated is reflected in the result of those queries. In our case there's a single query, the `QUERY` export of `CommentsCell`. We'll import that at the top of `CommentForm` (and rename so it's clear what it is to the rest of our code) and then pass it along to the `refetchQueries` option: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.jsx" +import { + Form, + FormError, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' +import { useMutation } from '@redwoodjs/web' + +// highlight-next-line +import { QUERY as CommentsQuery } from 'src/components/CommentsCell' + +// ... + +const CommentForm = ({ postId }) => { + // highlight-start + const [createComment, { loading, error }] = useMutation(CREATE, { + refetchQueries: [{ query: CommentsQuery }], + }) + // highlight-end + + //... +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.tsx" +import { + Form, + FormError, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' +import { useMutation } from '@redwoodjs/web' + +// highlight-next-line +import { QUERY as CommentsQuery } from 'src/components/CommentsCell' + +// ... + +const CommentForm = ({ postId }: Props) => { + // highlight-start + const [createComment, { loading, error }] = useMutation(CREATE, { + refetchQueries: [{ query: CommentsQuery }], + }) + // highlight-end + + //... +} +``` + +</TabItem> +</Tabs> + +Now when we create a comment it appears right away! It might be hard to tell because it's at the bottom of the comments list (which is a fine position if you want to read comments in chronological order, oldest to newest). Let's pop up a little notification that the comment was successful to let the user know their contribution was successful in case they don't realize it was added to the end of the page. + +We'll make use of good old fashioned React state to keep track of whether a comment has been posted in the form yet or not. If so, let's remove the comment form completely and show a "Thanks for your comment" message. Redwood includes [react-hot-toast](https://react-hot-toast.com/) for showing popup notifications, so let's use that to thank the user for their comment. 
We'll remove the form with just a couple of CSS classes: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.jsx" +// highlight-next-line +import { useState } from 'react' + +import { + Form, + FormError, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' +import { useMutation } from '@redwoodjs/web' +// highlight-next-line +import { toast } from '@redwoodjs/web/toast' + +import { QUERY as CommentsQuery } from 'src/components/CommentsCell' + +const CREATE = gql` + mutation CreateCommentMutation($input: CreateCommentInput!) { + createComment(input: $input) { + id + name + body + createdAt + } + } +` + +const CommentForm = ({ postId }) => { + // highlight-next-line + const [hasPosted, setHasPosted] = useState(false) + const [createComment, { loading, error }] = useMutation(CREATE, { + // highlight-start + onCompleted: () => { + setHasPosted(true) + toast.success('Thank you for your comment!') + }, + // highlight-end + refetchQueries: [{ query: CommentsQuery }], + }) + + const onSubmit = (input) => { + createComment({ variables: { input: { postId, ...input } } }) + } + + return ( + // highlight-next-line + <div className={hasPosted ? 'hidden' : ''}> + <h3 className="font-light text-lg text-gray-600">Leave a Comment</h3> + <Form className="mt-4 w-full" onSubmit={onSubmit}> + <FormError + error={error} + titleClassName="font-semibold" + wrapperClassName="bg-red-100 text-red-900 text-sm p-3 rounded" + /> + <Label + name="name" + className="block text-xs font-semibold text-gray-500 uppercase" + > + Name + </Label> + <TextField + name="name" + className="block w-full p-1 border rounded text-sm " + validation={{ required: true }} + /> + + <Label + name="body" + className="block mt-4 text-xs font-semibold text-gray-500 uppercase" + > + Comment + </Label> + <TextAreaField + name="body" + className="block w-full p-1 border rounded h-24 text-sm" + validation={{ required: true }} + /> + + <Submit + disabled={loading} + className="block mt-4 bg-blue-500 text-white text-xs font-semibold uppercase tracking-wide rounded px-3 py-2 disabled:opacity-50" + > + Submit + </Submit> + </Form> + </div> + ) +} + +export default CommentForm +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.tsx" +// highlight-next-line +import { useState } from 'react' + +import { + Form, + FormError, + Label, + TextField, + TextAreaField, + Submit, +} from '@redwoodjs/forms' +import { useMutation } from '@redwoodjs/web' +// highlight-next-line +import { toast } from '@redwoodjs/web/toast' + +import { QUERY as CommentsQuery } from 'src/components/CommentsCell' + +const CREATE = gql` + mutation CreateCommentMutation($input: CreateCommentInput!) 
{ + createComment(input: $input) { + id + name + body + createdAt + } + } +` + +interface FormValues { + name: string + email: string + message: string +} + +interface Props { + postId: number +} + +const CommentForm = ({ postId }: Props) => { + // highlight-next-line + const [hasPosted, setHasPosted] = useState(false) + const [createComment, { loading, error }] = useMutation(CREATE, { + // highlight-start + onCompleted: () => { + setHasPosted(true) + toast.success('Thank you for your comment!') + }, + // highlight-end + refetchQueries: [{ query: CommentsQuery }], + }) + + const onSubmit: SubmitHandler<FormValues> = (input) => { + createComment({ variables: { input: { postId, ...input } } }) + } + + return ( + // highlight-next-line + <div className={hasPosted ? 'hidden' : ''}> + <h3 className="font-light text-lg text-gray-600">Leave a Comment</h3> + <Form className="mt-4 w-full" onSubmit={onSubmit}> + <FormError + error={error} + titleClassName="font-semibold" + wrapperClassName="bg-red-100 text-red-900 text-sm p-3 rounded" + /> + <Label + name="name" + className="block text-xs font-semibold text-gray-500 uppercase" + > + Name + </Label> + <TextField + name="name" + className="block w-full p-1 border rounded text-sm " + validation={{ required: true }} + /> + + <Label + name="body" + className="block mt-4 text-xs font-semibold text-gray-500 uppercase" + > + Comment + </Label> + <TextAreaField + name="body" + className="block w-full p-1 border rounded h-24 text-sm" + validation={{ required: true }} + /> + + <Submit + disabled={loading} + className="block mt-4 bg-blue-500 text-white text-xs font-semibold uppercase tracking-wide rounded px-3 py-2 disabled:opacity-50" + > + Submit + </Submit> + </Form> + </div> + ) +} + +export default CommentForm +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/153932278-6e504b6b-9e8e-400e-98fb-8bfeefbe3812.png) + +We used `hidden` to just hide the form and "Leave a comment" title completely from the page, but keeps the component itself mounted. But where's our "Thank you for your comment" notification? We still need to add the `Toaster` component (from react-hot-toast) somewhere in our app so that the message can actually be displayed. We could just add it here, in `CommentForm`, but what if we want other code to be able to post notifications, even when `CommentForm` isn't mounted? Where's the one place we put UI elements that should be visible everywhere? The `BlogLayout`! 
+ +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.jsx" +import { Link, routes } from '@redwoodjs/router' +// highlight-next-line +import { Toaster } from '@redwoodjs/web/toast' + +import { useAuth } from 'src/auth' + +const BlogLayout = ({ children }) => { + const { logOut, isAuthenticated, currentUser } = useAuth() + + return ( + <> + // highlight-next-line + <Toaster /> + <header className="relative flex justify-between items-center py-4 px-8 bg-blue-700 text-white"> + <h1 className="text-5xl font-semibold tracking-tight"> + <Link + className="text-blue-400 hover:text-blue-100 transition duration-100" + to={routes.home()} + > + Redwood Blog + </Link> + </h1> + <nav> + <ul className="relative flex items-center font-light"> + <li> + <Link + className="py-2 px-4 hover:bg-blue-600 transition duration-100 rounded" + to={routes.about()} + > + About + </Link> + </li> + <li> + <Link + className="py-2 px-4 hover:bg-blue-600 transition duration-100 rounded" + to={routes.contact()} + > + Contact + </Link> + </li> + <li> + {isAuthenticated ? ( + <div> + <button type="button" onClick={logOut} className="py-2 px-4"> + Logout + </button> + </div> + ) : ( + <Link to={routes.login()} className="py-2 px-4"> + Login + </Link> + )} + </li> + </ul> + {isAuthenticated && ( + <div className="absolute bottom-1 right-0 mr-12 text-xs text-blue-300"> + {currentUser.email} + </div> + )} + </nav> + </header> + <main className="max-w-4xl mx-auto p-12 bg-white shadow rounded-b"> + {children} + </main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/layouts/BlogLayout/BlogLayout.tsx" +import { Link, routes } from '@redwoodjs/router' +// highlight-next-line +import { Toaster } from '@redwoodjs/web/toast' + +import { useAuth } from 'src/auth' + +type BlogLayoutProps = { + children?: React.ReactNode +} + +const BlogLayout = ({ children }: BlogLayoutProps) => { + const { logOut, isAuthenticated, currentUser } = useAuth() + + return ( + <> + // highlight-next-line + <Toaster /> + <header className="relative flex justify-between items-center py-4 px-8 bg-blue-700 text-white"> + <h1 className="text-5xl font-semibold tracking-tight"> + <Link + className="text-blue-400 hover:text-blue-100 transition duration-100" + to={routes.home()} + > + Redwood Blog + </Link> + </h1> + <nav> + <ul className="relative flex items-center font-light"> + <li> + <Link + className="py-2 px-4 hover:bg-blue-600 transition duration-100 rounded" + to={routes.about()} + > + About + </Link> + </li> + <li> + <Link + className="py-2 px-4 hover:bg-blue-600 transition duration-100 rounded" + to={routes.contact()} + > + Contact + </Link> + </li> + <li> + {isAuthenticated ? ( + <div> + <button type="button" onClick={logOut} className="py-2 px-4"> + Logout + </button> + </div> + ) : ( + <Link to={routes.login()} className="py-2 px-4"> + Login + </Link> + )} + </li> + </ul> + {isAuthenticated && ( + <div className="absolute bottom-1 right-0 mr-12 text-xs text-blue-300"> + {currentUser.email} + </div> + )} + </nav> + </header> + <main className="max-w-4xl mx-auto p-12 bg-white shadow rounded-b"> + {children} + </main> + </> + ) +} + +export default BlogLayout +``` + +</TabItem> +</Tabs> + +Now add a comment: + +![image](https://user-images.githubusercontent.com/300/153933162-079ac322-acde-4ea0-b43e-58b53fb85d98.png) + +### Almost Done? + +So it looks like we're just about done here! 
Try going back to the homepage and go to another blog post. Let's bask in the glory of our amazing coding abilities and—OH NO: + +![image](https://user-images.githubusercontent.com/300/153933665-83158870-8422-4da9-9809-7d3b51444a14.png) + +All posts have the same comments! **WHAT HAVE WE DONE??** + +Remember our foreshadowing callout a few pages back, wondering if our `comments()` service which only returns *all* comments could come back to bite us? It finally has: when we get the comments for a post we're not actually getting them for only that post. We're ignoring the `postId` completely and just returning *all* comments in the database! Turns out the old axiom is true: computers only do exactly what you tell them to do. + +Let's fix it! + +### Returning Only Some Comments + +We'll need to make both frontend and backend changes to get only some comments to show. Let's start with the backend and do a little test-driven development to make this change. + +#### Introducing the Redwood Console + +It would be nice if we could try out sending some arguments to our Prisma calls and be sure that we can request a single post's comments without having to write the whole stack into the app (component/cell, GraphQL, service) just to see if it works. + +That's where the Redwood Console comes in! In a new terminal instance, try this: + +```bash +yarn rw console +``` + +You'll see a standard Node console but with most of Redwood's internals already imported and ready to go! Most importantly, that includes the database. Try it out: + +```bash +> db.comment.findMany() +[ + { + id: 1, + name: 'Rob', + body: 'The first real comment!', + postId: 1, + createdAt: 2020-12-08T23:45:10.641Z + }, + { + id: 2, + name: 'Tom', + body: 'Here is another comment', + postId: 1, + createdAt: 2020-12-08T23:46:10.641Z + } +] +``` + +(Output will be slightly different, of course, depending on what comments you already have in your database.) + +Let's try the syntax that will allow us to only get comments for a given `postId`: + +```bash +> db.comment.findMany({ where: { postId: 1 }}) +[ + { + id: 1, + name: 'Rob', + body: 'The first real comment!', + postId: 1, + createdAt: 2020-12-08T23:45:10.641Z + }, + { + id: 2, + name: 'Tom', + body: 'Here is another comment', + postId: 1, + createdAt: 2020-12-08T23:46:10.641Z + } +] +``` + +Well it worked, but the list is exactly the same. That's because we've only added comments for a single post! Let's create a comment for a second post and make sure that only those comments for a specific `postId` are returned. + +We'll need the `id` of another post. Make sure you have at least two (create one through the admin if you need to). 
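+
+If you'd rather stay in the console than click through the admin, you can create that extra post right here with the same `db` client. A quick sketch (the title and body are just placeholder text):
+
+```bash
+> db.post.create({ data: { title: 'Second Post', body: 'Some more words about trees' } })
+```
+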
+We can get a list of all the existing posts and copy the `id`:
+
+```bash
+> db.post.findMany({ select: { id: true } })
+[ { id: 1 }, { id: 2 }, { id: 3 } ]
+```
+
+Okay, now let's create a comment for that second post via the console:
+
+```bash
+> db.comment.create({ data: { name: 'Peter', body: 'I also like leaving comments', postId: 2 } })
+{
+  id: 3,
+  name: 'Peter',
+  body: 'I also like leaving comments',
+  postId: 2,
+  createdAt: 2020-12-08T23:47:10.641Z
+}
+```
+
+Now we'll try our comment query again, once with each `postId`:
+
+```bash
+> db.comment.findMany({ where: { postId: 1 }})
+[
+  {
+    id: 1,
+    name: 'Rob',
+    body: 'The first real comment!',
+    postId: 1,
+    createdAt: 2020-12-08T23:45:10.641Z
+  },
+  {
+    id: 2,
+    name: 'Tom',
+    body: 'Here is another comment',
+    postId: 1,
+    createdAt: 2020-12-08T23:46:10.641Z
+  }
+]
+
+> db.comment.findMany({ where: { postId: 2 }})
+[
+  {
+    id: 3,
+    name: 'Peter',
+    body: 'I also like leaving comments',
+    postId: 2,
+    createdAt: 2020-12-08T23:47:10.641Z
+  }
+]
+```
+
+Great! Now that we've tested out the syntax let's use that in the service. You can exit the console by pressing Ctrl-C twice or typing `.exit`.
+
+:::info Where's the `await`?
+
+Calls to `db` return a Promise, which you would normally need to add an `await` to in order to get the results right away. Having to add `await` every time is pretty annoying though, so the Redwood console does it for you—Redwood `await`s so you don't have to!
+
+:::
+
+#### Updating the Service
+
+Try running the test suite (or if it's already running take a peek at that terminal window) and make sure all of our tests still pass. The "lowest level" of the api-side is the services, so let's start there.
+
+:::tip
+
+One way to think about your codebase is a "top to bottom" view where the top is what's "closest" to the user and what they interact with (React components) and the bottom is the "farthest" thing from them, which in the case of a web application would usually be a database or other data store (behind a third party API, perhaps). One level above the database are the services, which communicate directly with the database:
+
+```
+   Browser
+      |
+    React ─┐
+      |    │
+   GraphQL ├─ Redwood
+      |    │
+ Services ─┘
+      |
+  Database
+```
+
+There are no hard and fast rules here, but generally the farther down you put your business logic (the code that deals with moving and manipulating data) the easier it will be to build and maintain your application. Redwood encourages you to put your business logic in services since they're "closest" to the data and behind the GraphQL interface.
+ +::: + +Open up the **comments** service test and let's update it to pass the `postId` argument to the `comments()` function like we tested out in the console: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.test.js" +scenario('returns all comments', async (scenario) => { + // highlight-next-line + const result = await comments({ postId: scenario.comment.jane.postId }) + expect(result.length).toEqual(Object.keys(scenario.comment).length) +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.test.ts" +scenario('returns all comments', async (scenario: StandardScenario) => { + // highlight-next-line + const result = await comments({ postId: scenario.comment.jane.postId }) + expect(result.length).toEqual(Object.keys(scenario.comment).length) +}) +``` + +</TabItem> +</Tabs> + +When the test suite runs everything will still pass. JavaScript won't care if you're passing an argument all of a sudden (although if you were using Typescript you will actually get an error at this point!). In TDD you generally want to get your test to fail before adding code to the thing you're testing which will then cause the test to pass. What's something in this test that will be different once we're only returning *some* comments? How about the number of comments expected to be returned? + +Let's take a look at the scenario we're using (remember, it's `standard()` by default): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.scenarios.js" +export const standard = defineScenario({ + comment: { + jane: { + data: { + name: 'Jane Doe', + body: 'I like trees', + post: { + create: { + title: 'Redwood Leaves', + body: 'The quick brown fox jumped over the lazy dog.', + }, + }, + }, + }, + john: { + data: { + name: 'John Doe', + body: 'Hug a tree today', + post: { + create: { + title: 'Root Systems', + body: 'The five boxing wizards jump quickly.', + }, + }, + }, + }, + }, +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript title="api/src/services/comments/comments.scenarios.ts" +export const standard = defineScenario({ + comment: { + jane: { + data: { + name: 'Jane Doe', + body: 'I like trees', + post: { + create: { + title: 'Redwood Leaves', + body: 'The quick brown fox jumped over the lazy dog.', + }, + }, + }, + }, + john: { + data: { + name: 'John Doe', + body: 'Hug a tree today', + post: { + create: { + title: 'Root Systems', + body: 'The five boxing wizards jump quickly.', + }, + }, + }, + }, + }, +}) +``` + +</TabItem> +</Tabs> + +Each scenario here is associated with its own post, so rather than counting all the comments in the database (like the test does now) let's only count the number of comments attached to the single post we're getting comments for (we're passing the postId into the `comments()` call now). 
Let's see what it looks like in test form: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="api/src/services/comments/comments.test.jsx" +import { comments, createComment } from './comments' +// highlight-next-line +import { db } from 'src/lib/db' + +describe('comments', () => { + scenario('returns all comments', async (scenario) => { + const result = await comments({ postId: scenario.comment.jane.postId }) + // highlight-start + const post = await db.post.findUnique({ + where: { id: scenario.comment.jane.postId }, + include: { comments: true }, + }) + expect(result.length).toEqual(post.comments.length) + // highlight-end + }) + + // ... +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="api/src/services/comments/comments.test.ts" +import { comments, createComment } from './comments' +// highlight-next-line +import { db } from 'src/lib/db' + +import type { StandardScenario } from './comments.scenarios' + +describe('comments', () => { + scenario('returns all comments', async (scenario) => { + const result = await comments({ postId: scenario.comment.jane.postId }) + // highlight-start + const post = await db.post.findUnique({ + where: { id: scenario.comment.jane.postId }, + include: { comments: true }, + }) + expect(result.length).toEqual(post.comments.length) + // highlight-end + }) + + // ... +}) +``` + +</TabItem> +</Tabs> + +So we're first getting the result from the services, all the comments for a given `postId`. Then we pull the *actual* post from the database and include its comments. Then we expect that the number of comments returned from the service is the same as the number of comments actually attached to the post in the database. Now the test fails and you can see why in the output: + +```bash + FAIL api api/src/services/comments/comments.test.js + • comments › returns all comments + + expect(received).toEqual(expected) // deep equality + + Expected: 1 + Received: 2 +``` + +So we expected to receive 1 (from `post.comments.length`), but we actually got 2 (from `result.length`). 
+ +Before we get it passing again, let's also change the name of the test to reflect what it's actually testing: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.test.js" +// highlight-start +scenario( + 'returns all comments for a single post from the database', + // highlight-end + async (scenario) => { + const result = await comments({ postId: scenario.comment.jane.postId }) + const post = await db.post.findUnique({ + where: { id: scenario.comment.jane.postId }, + include: { comments: true }, + }) + expect(result.length).toEqual(post.comments.length) + } +) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript title="api/src/services/comments/comments.test.ts" +// highlight-start +scenario( + 'returns all comments for a single post from the database', + // highlight-end + async (scenario: StandardScenario) => { + const result = await comments({ postId: scenario.comment.jane.postId }) + const post = await db.post.findUnique({ + where: { id: scenario.comment.jane.postId }, + include: { comments: true }, + }) + expect(result.length).toEqual(post.comments.length) + } +) +``` + +</TabItem> +</Tabs> + +Okay, open up the actual `comments.js` service and we'll update it to accept the `postId` argument and use it as an option to `findMany()` (be sure to update the `comments()` function [with an "s"] and not the unused `comment()` function): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.js" +export const comments = ({ postId }) => { + return db.comment.findMany({ where: { postId } }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.ts" +export const comments = ({ + postId, +}: Required<Pick<Prisma.CommentWhereInput, 'postId'>>) => { + return db.comment.findMany({ where: { postId } }) +} +``` + +</TabItem> +</Tabs> + +Save that and the test should pass again! + +#### Updating GraphQL + +Next we need to let GraphQL know that it should expect a `postId` to be passed for the `comments` query, and it's required (we don't currently have any view that allows you see all comments everywhere so we can ask that it always be present). Open up the `comments.sdl.{js,ts}` file: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/comments.sdl.js" +type Query { + // highlight-next-line + comments(postId: Int!): [Comment!]! @skipAuth +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/comments.sdl.ts" +type Query { + // highlight-next-line + comments(postId: Int!): [Comment!]! @skipAuth +} +``` + +</TabItem> +</Tabs> + +Now if you try refreshing the real site in dev mode you'll see an error where the comments should be displayed: + +![image](https://user-images.githubusercontent.com/300/198095941-bbd07ede-2006-422a-8635-ea8fe57dd403.png) + +For security reasons we don't show the internal error message here, but if you check the terminal window where `yarn rw dev` is running you'll see the real message: + +```text +Field "comments" argument "postId" of type "Int!" is required, but it was not provided. +``` + +And yep, it's complaining about `postId` not being present—exactly what we want! + +That completes the backend updates, now we just need to tell `CommentsCell` to pass through the `postId` to the GraphQL query it makes. 
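+
+Before we move to the web side, here's an optional extra check you could add while you're still in the test file. Since the `standard` scenario's two comments each belong to their own post, you can also assert that another post's comments don't leak into the result. This is just a sketch building on the scenario above, not an official tutorial step:
+
+```javascript title="api/src/services/comments/comments.test.js"
+scenario('does not include comments from other posts', async (scenario) => {
+  const result = await comments({ postId: scenario.comment.jane.postId })
+
+  // john's comment was created against a different post, so it should not appear
+  expect(result.map((comment) => comment.id)).not.toContain(
+    scenario.comment.john.id
+  )
+})
+```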
+ +#### Updating the Cell + +First we'll need to get the `postId` to the cell itself. Remember when we added a `postId` prop to the `CommentForm` component so it knew which post to attach the new comment to? Let's do the same for `CommentsCell`. + +Open up `Article`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + {!summary && ( + <div className="mt-12"> + <CommentForm postId={article.id} /> + <div className="mt-12"> + // highlight-next-line + <CommentsCell postId={article.id} /> + </div> + </div> + )} + </article> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/Article/Article.tsx" +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + {!summary && ( + <div className="mt-12"> + <CommentForm postId={article.id} /> + <div className="mt-12"> + // highlight-next-line + <CommentsCell postId={article.id} /> + </div> + </div> + )} + </article> + ) +} +``` + +</TabItem> +</Tabs> + +And finally, we need to take that `postId` and pass it on to the `QUERY` in the cell: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="web/src/components/CommentsCell/CommentsCell.jsx" +export const QUERY = gql` + // highlight-start + query CommentsQuery($postId: Int!) { + comments(postId: $postId) { + // highlight-end + id + name + body + createdAt + } + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="web/src/components/CommentsCell/CommentsCell.tsx" +export const QUERY = gql` + // highlight-start + query CommentsQuery($postId: Int!) { + comments(postId: $postId) { + // highlight-end + id + name + body + createdAt + } + } +` +``` + +</TabItem> +</Tabs> + +Where does this magical `$postId` come from? Redwood is nice enough to automatically provide it to you since you passed it in as a prop when you called the component! + +Try going to a couple of different blog posts and you should see only comments associated to the proper posts (including the one we created in the console). You can add a comment to each blog post individually and they'll stick to their proper owners: + +![image](https://user-images.githubusercontent.com/300/100954162-de24f680-34c8-11eb-817b-0a7ad802f28b.png) + +However, you may have noticed that now when you post a comment it no longer appears right away! ARGH! Okay, turns out there's one more thing we need to do. Remember when we told the comment creation logic to `refetchQueries`? We need to include any variables that were present the first time so that it can refetch the proper ones. + +#### Updating the Form Refetch + +Okay this is the last fix, promise! 
+ +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.jsx" +const [createComment, { loading, error }] = useMutation(CREATE, { + onCompleted: () => { + setHasPosted(true) + toast.success('Thank you for your comment!') + }, + // highlight-next-line + refetchQueries: [{ query: CommentsQuery, variables: { postId } }], +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```jsx title="web/src/components/CommentForm/CommentForm.tsx" +const [createComment, { loading, error }] = useMutation(CREATE, { + onCompleted: () => { + setHasPosted(true) + toast.success('Thank you for your comment!') + }, + // highlight-next-line + refetchQueries: [{ query: CommentsQuery, variables: { postId } }], +}) +``` + +</TabItem> +</Tabs> + +There we go, comment engine complete! Our blog is totally perfect and there's absolutely nothing we could do to make it better. + +Or is there? diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter6/comments-schema.md b/docs/versioned_docs/version-7.0/tutorial/chapter6/comments-schema.md new file mode 100644 index 000000000000..243488167736 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter6/comments-schema.md @@ -0,0 +1,933 @@ +# Adding Comments to the Schema + +Let's take a moment to appreciate how amazing this is—we built, designed and tested a completely new component for our app, which displays data from an API call (which would pull that data from a database) without actually having to build any of that backend functionality! Redwood let us provide fake data to Storybook and Jest so we could get our component working. + +Unfortunately, even with all of this flexibility there's still no such thing as a free lunch. Eventually we're going to have to actually do that backend work. Now's the time. + +If you went through the first part of the tutorial you should be somewhat familiar with this flow: + +1. Add a model to `schema.prisma` +2. Run a `yarn rw prisma migrate dev` commands to create a migration and apply it to the database +3. Generate an SDL and service + +### Adding the Comment model + +Let's do that now: + +```javascript title="api/db/schema.prisma" +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + binaryTargets = "native" +} + +model Post { + id Int @id @default(autoincrement()) + title String + body String + // highlight-next-line + comments Comment[] + createdAt DateTime @default(now()) +} + +model Contact { + id Int @id @default(autoincrement()) + name String + email String + message String + createdAt DateTime @default(now()) +} + +model User { + id Int @id @default(autoincrement()) + name String? + email String @unique + hashedPassword String + salt String + resetToken String? + resetTokenExpiresAt DateTime? +} + +// highlight-start +model Comment { + id Int @id @default(autoincrement()) + name String + body String + post Post @relation(fields: [postId], references: [id]) + postId Int + createdAt DateTime @default(now()) +} +// highlight-end +``` + +Most of these lines look very similar to what we've already seen, but this is the first instance of a [relation](https://www.prisma.io/docs/reference/tools-and-interfaces/prisma-schema/relations) between two models. `Comment` gets two entries to denote this relationship: + +* `post` which has a type of `Post` and a special `@relation` keyword that tells Prisma how to connect a `Comment` to a `Post`. 
In this case the field `postId` references the field `id` in `Post` +* `postId` is just a regular `Int` column which contains the `id` of the `Post` that this comment is referencing + +This gives us a classic database model: + +``` +┌───────────┐ ┌───────────┐ +│ Post │ │ Comment │ +├───────────┤ ├───────────┤ +│ id │───┐ │ id │ +│ title │ │ │ name │ +│ body │ │ │ body │ +│ createdAt │ └──<│ postId │ +└───────────┘ │ createdAt │ + └───────────┘ +``` + +Note that there is no real database column named `post` in `Comment`—this is special syntax for Prisma to know how to connect the models together and for you to reference that connection. When you query for a `Comment` using Prisma you can get access to the attached `Post` using that name: + +```javascript +db.comment.findUnique({ where: { id: 1 }}).post() +``` + +Prisma also added a convenience `comments` field to `Post` which gives us the same capability in reverse: + +```javascript +db.post.findUnique({ where: { id: 1 }}).comments() +``` + +### Running the Migration + +This one is easy enough: we'll create a new migration with a name and then run it: + +```bash +yarn rw prisma migrate dev +``` + +When prompted, give this one a name something like "create comment". + +:::tip + +You'll need to restart the test suite runner at this point if it's still running. You can do a Ctrl-C or just press `q`. Redwood creates a second, test database for you to run your tests against (it is at `.redwood/test.db` by default). The database migrations are run against that test database whenever the test suite is *started*, not while it's running, so you'll need to restart it to test against the new database structure. + +::: + +### Creating the SDL and Service + +Next we'll create the SDL (that defines the GraphQL interface) and a service (to get the records out of the database) with a generator call: + +```bash +yarn rw g sdl Comment --no-crud +``` + +Note the `--no-crud` flag here. This gives us bare-bones functionality to start with (read-only access to our model) that we can build on. We got all the CRUD endpoints for free when we created the Post section of our site, so let's do the opposite here and see how to add functionality from scratch. + +That command will create both the SDL and the service. One change we'll need to make to the generated code is to allow access to anonymous users to view all comments. Change the `@requireAuth` directive to `@skipAuth` instead: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/comments.sdl.js" +export const schema = gql` + type Comment { + id: Int! + name: String! + body: String! + post: Post! + postId: Int! + createdAt: DateTime! + } + + type Query { + // highlight-next-line + comments: [Comment!]! @skipAuth + } + + input CreateCommentInput { + name: String! + body: String! + postId: Int! + } + + input UpdateCommentInput { + name: String + body: String + postId: Int + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/comments.sdl.ts" +export const schema = gql` + type Comment { + id: Int! + name: String! + body: String! + post: Post! + postId: Int! + createdAt: DateTime! + } + + type Query { + // highlight-next-line + comments: [Comment!]! @skipAuth + } + + input CreateCommentInput { + name: String! + body: String! + postId: Int! 
+  }
+
+  input UpdateCommentInput {
+    name: String
+    body: String
+    postId: Int
+  }
+`
+```
+
+</TabItem>
+</Tabs>
+
+Now if you take a look back at the real app in the browser (not Storybook) you should see a different message than the GraphQL error we were seeing before:
+
+![image](https://user-images.githubusercontent.com/300/101552505-d1405100-3967-11eb-883f-1227689e5f88.png)
+
+"Empty" means the Cell rendered correctly! There just aren't any comments in the database yet. Let's update the `CommentsCell` component to make that "Empty" message a little more friendly:
+
+<Tabs groupId="js-ts">
+<TabItem value="js" label="JavaScript">
+
+```jsx title="web/src/components/CommentsCell/CommentsCell.jsx"
+export const Empty = () => {
+  // highlight-next-line
+  return <div className="text-center text-gray-500">No comments yet</div>
+}
+```
+
+</TabItem>
+<TabItem value="ts" label="TypeScript">
+
+```tsx title="web/src/components/CommentsCell/CommentsCell.tsx"
+export const Empty = () => {
+  // highlight-next-line
+  return <div className="text-center text-gray-500">No comments yet</div>
+}
+```
+
+</TabItem>
+</Tabs>
+
+![image](https://user-images.githubusercontent.com/300/153501827-87b9f931-ee68-4baf-9342-3a70b03d55e2.png)
+
+That's better. Let's update the test that covers the Empty component render as well:
+
+<Tabs groupId="js-ts">
+<TabItem value="js" label="JavaScript">
+
+```jsx title="web/src/components/CommentsCell/CommentsCell.test.jsx"
+it('renders Empty successfully', async () => {
+  // highlight-start
+  render(<Empty />)
+  expect(screen.getByText('No comments yet')).toBeInTheDocument()
+  // highlight-end
+})
+```
+
+</TabItem>
+<TabItem value="ts" label="TypeScript">
+
+```tsx title="web/src/components/CommentsCell/CommentsCell.test.tsx"
+it('renders Empty successfully', async () => {
+  // highlight-start
+  render(<Empty />)
+  expect(screen.getByText('No comments yet')).toBeInTheDocument()
+  // highlight-end
+})
+```
+
+</TabItem>
+</Tabs>
+
+Okay, let's focus on the service for a bit. We'll need to add a function to let users create a new comment and we'll add a test that covers the new functionality.
+
+### Building out the Service
+
+By virtue of using the generator we've already got the function we need to select all comments from the database:
+
+<Tabs groupId="js-ts">
+<TabItem value="js" label="JavaScript">
+
+```javascript title="api/src/services/comments/comments.js"
+import { db } from 'src/lib/db'
+
+export const comments = () => {
+  return db.comment.findMany()
+}
+
+export const comment = ({ id }) => {
+  return db.comment.findUnique({
+    where: { id },
+  })
+}
+
+export const Comment = {
+  post: (_obj, { root }) =>
+    db.comment.findUnique({ where: { id: root.id } }).post(),
+}
+```
+
+</TabItem>
+<TabItem value="ts" label="TypeScript">
+
+```ts title="api/src/services/comments/comments.ts"
+import type { Prisma } from '@prisma/client'
+import type { QueryResolvers, CommentRelationResolvers } from 'types/graphql'
+
+import { db } from 'src/lib/db'
+
+export const comments = () => {
+  return db.comment.findMany()
+}
+
+export const comment: QueryResolvers['comment'] = ({ id }) => {
+  return db.comment.findUnique({
+    where: { id },
+  })
+}
+
+export const Comment: CommentRelationResolvers = {
+  post: (_obj, { root }) => {
+    return db.comment.findUnique({ where: { id: root?.id } }).post()
+  },
+}
+```
+
+</TabItem>
+</Tabs>
+
+We've also got a function that returns only a single comment, as well as this `Comment` object at the end.
That allows us to return nested post data for a comment through GraphQL using syntax like this (don't worry about adding this code to our app, this is just an example): + +```graphql +query CommentsQuery { + comments { + id + name + body + createdAt + post { + id + title + body + createdAt + } + } +} +``` + +:::info + +Have you noticed that something may be amiss? The `comments()` function returns *all* comments, and all comments only. Could this come back to bite us? + +Hmmm... + +::: + +We need to be able to create a comment as well. We'll use the same convention that's used in Redwood's generated scaffolds: the create endpoint will accept a single parameter `input` which is an object with the individual model fields: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.js" +export const createComment = ({ input }) => { + return db.comment.create({ + data: input, + }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript title="api/src/services/comments/comments.ts" +interface CreateCommentArgs { + input: Prisma.CommentCreateInput +} + +export const createComment = ({ input }: CreateCommentArgs) => { + return db.comment.create({ + data: input, + }) +} +``` + +</TabItem> +</Tabs> + +We'll also need to expose this function via GraphQL so we'll add a Mutation to the SDL and use `@skipAuth` since, again, it can be accessed by everyone: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/comments.sdl.js" +export const schema = gql` + type Comment { + id: Int! + name: String! + body: String! + post: Post! + postId: Int! + createdAt: DateTime! + } + + type Query { + comments: [Comment!]! @skipAuth + } + + input CreateCommentInput { + name: String! + body: String! + postId: Int! + } + + input UpdateCommentInput { + name: String + body: String + postId: Int + } + + // highlight-start + type Mutation { + createComment(input: CreateCommentInput!): Comment! @skipAuth + } + // highlight-end +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/comments.sdl.ts" +export const schema = gql` + type Comment { + id: Int! + name: String! + body: String! + post: Post! + postId: Int! + createdAt: DateTime! + } + + type Query { + comments: [Comment!]! @skipAuth + } + + input CreateCommentInput { + name: String! + body: String! + postId: Int! + } + + input UpdateCommentInput { + name: String + body: String + postId: Int + } + + // highlight-start + type Mutation { + createComment(input: CreateCommentInput!): Comment! @skipAuth + } + // highlight-end +` +``` + +</TabItem> +</Tabs> + +:::tip + +The `CreateCommentInput` type was already created for us by the SDL generator. + +::: + +That's all we need on the api-side to create a comment! But let's think for a moment: is there anything else we need to do with a comment? Let's make the decision that users won't be able to update an existing comment. And we don't need to select individual comments (remember earlier we talked about the possibility of each comment being responsible for its own API request and display, but we decided against it). + +What about deleting a comment? We won't let a user delete their own comment, but as owners of the blog we should be able to delete/moderate them. So we'll need a delete function and API endpoint as well. 
Let's add those: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.js" +export const deleteComment = ({ id }) => { + return db.comment.delete({ + where: { id }, + }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.ts" +export const deleteComment = ({ id }: Prisma.CommentWhereUniqueInput) => { + return db.comment.delete({ + where: { id }, + }) +} +``` + +</TabItem> +</Tabs> + +Since we only want owners of the blog to be able to delete comments, we'll use `@requireAuth`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/comments.sdl.js" +type Mutation { + createComment(input: CreateCommentInput!): Comment! @skipAuth + // highlight-next-line + deleteComment(id: Int!): Comment! @requireAuth +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/comments.sdl.ts" +type Mutation { + createComment(input: CreateCommentInput!): Comment! @skipAuth + // highlight-next-line + deleteComment(id: Int!): Comment! @requireAuth +} +``` + +</TabItem> +</Tabs> + +`deleteComment` will be given a single argument, the ID of the comment to delete, and it's required. A common pattern is to return the record that was just deleted in case you wanted to notify the user or some other system about the details of the thing that was just removed, so we'll do that here as well. But, you could just as well return `null`. + +### Testing the Service + +Let's make sure our service functionality is working and continues to work as we modify our app. + +If you open up `api/src/services/comments/comments.test.js` you'll see there's one in there already, making sure that retrieving all comments (the default `comments()` function that was generated along with the service) works: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.test.js" +import { comments } from './comments' + +describe('comments', () => { + scenario('returns all comments', async (scenario) => { + const result = await comments() + + expect(result.length).toEqual(Object.keys(scenario.comment).length) + }) +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript title="api/src/services/comments/comments.test.ts" +import { comments } from './comments' + +describe('comments', () => { + scenario('returns all comments', async (scenario: StandardScenario) => { + const result = await comments() + + expect(result.length).toEqual(Object.keys(scenario.comment).length) + }) +}) +``` + +</TabItem> +</Tabs> + +What is this `scenario()` function? That's made available by Redwood that mostly acts like Jest's built-in `it()` and `test()` functions, but with one important difference: it pre-seeds a test database with data that is then passed to you in the `scenario` argument. You can count on this data existing in the database and being reset between tests in case you make changes to it. You can create the data structure for any and all models defined in `schema.prisma`, not just comments (the file happens to be named that because it's the ones that will load when running `comments.test.js`). + +:::info In the section on mocks you said relying on data in the database for testing was dumb? + +Yes, all things being equal it would be great to not have these tests depend on a piece of software outside of our control. 
+ +However, the difference here is that in a service almost all of the logic you write will depend on moving data in and out of a database and it's much simpler to just let that code run and *really* access the database, rather than trying to mock and intercept each and every possible call that Prisma could make. + +Not to mention that Prisma itself is currently under development and implementations could change at any time. Trying to keep pace with those changes and constantly keep mocks in sync would be a nightmare! + +That being said, if you really wanted to you could use Jest's [mocking utilities](https://jestjs.io/docs/en/mock-functions) and completely mock the Prisma interface to abstract the database away completely. But don't say we didn't warn you! + +::: + +Where does that data come from? Take a look at the `comments.scenarios.{js,ts}` file which is next door: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments.scenarios.js" +export const standard = defineScenario({ + comment: { + one: { + data: { + name: 'String', + body: 'String', + post: { create: { title: 'String', body: 'String' } }, + }, + }, + two: { + data: { + name: 'String', + body: 'String', + post: { create: { title: 'String', body: 'String' } }, + }, + }, + }, +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments.scenarios.ts" +import type { Prisma } from '@prisma/client' + +export const standard = defineScenario<Prisma.CommentCreateArgs>({ + comment: { + one: { + data: { + name: 'String', + body: 'String', + post: { create: { title: 'String', body: 'String' } }, + }, + }, + two: { + data: { + name: 'String', + body: 'String', + post: { create: { title: 'String', body: 'String' } }, + }, + }, + }, +}) +``` + +</TabItem> +</Tabs> + +This calls a `defineScenario()` function which checks that your data structure matches what's defined in Prisma. Each scenario data object (for example, `scenario.comment.one`) is passed as-is to Prisma's [`create`](https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#create). That way you can customize the scenario object using any of Prisma's supported options. + +:::info The "standard" scenario + +The exported scenario here is named "standard." Remember when we worked on component tests and mocks, there was a special mock named `standard` which Redwood would use by default if you didn't specify a name? The same rule applies here! When we add a test for `createComment()` we'll see an example of using a different scenario with a unique name. + +::: + +The nested structure of a scenario is defined like this: + +* **comment**: the name of the model this data is for + * **one, two**: a friendly name given to the scenario data which you can reference in your tests + * **data**: contains the actual data that will be put in the database + * **name, body, post**: fields that correspond to the schema. 
In this case a **Comment** requires that it be related to a **Post**, so the scenario has a `post` key and values as well (using Prisma's [nested create syntax](https://www.prisma.io/docs/concepts/components/prisma-client/relation-queries#nested-writes)) + * **select, include**: optionally, to customize the object to `select` or `include` related fields [using Prisma's syntax](https://www.prisma.io/docs/concepts/components/prisma-client/relation-queries#create-a-related-record) + +When you receive the `scenario` argument in your test, the `data` key gets unwrapped so that you can reference fields like `scenario.comment.one.name`. + +:::info Why does every field just contain the string "String"? + +When generating the service (and the test and scenarios) all we (Redwood) knows about your data is the types for each field as defined in `schema.prisma`, namely `String`, `Integer` or `DateTime`. So we add the simplest data possible that fulfills the type requirement by Prisma to get the data into the database. You should definitely replace this data with something that looks more like the real data your app will be expecting. In fact... + +::: + +Let's replace that scenario data with something more like the real data our app will be expecting: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.scenarios.js" +export const standard = defineScenario({ + comment: { + // highlight-start + jane: { + data: { + name: 'Jane Doe', + body: 'I like trees', + post: { + create: { + title: 'Redwood Leaves', + body: 'The quick brown fox jumped over the lazy dog.' + } + } + } + }, + john: { + data: { + name: 'John Doe', + body: 'Hug a tree today', + post: { + create: { + title: 'Root Systems', + body: 'The five boxing wizards jump quickly.' + } + } + } + } + // highlight-end + } +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.scenarios.ts" +import type { Prisma } from '@prisma/client' + +export const standard = defineScenario<Prisma.CommentCreateArgs>({ + comment: { + // highlight-start + jane: { + data: { + name: 'Jane Doe', + body: 'I like trees', + post: { + create: { + title: 'Redwood Leaves', + body: 'The quick brown fox jumped over the lazy dog.' + } + } + } + }, + john: { + data: { + name: 'John Doe', + body: 'Hug a tree today', + post: { + create: { + title: 'Root Systems', + body: 'The five boxing wizards jump quickly.', + } + } + } + } + // highlight-end + } +}) +``` + +</TabItem> +</Tabs> + +Note that we changed the names of the records from `one` and `two` to the names of the authors, `jane` and `john`. More on that later. Why didn't we include `id` or `createdAt` fields? We told Prisma, in `schema.prisma`, to assign defaults to these fields so they'll be set automatically when the records are created. + +The test created by the service generator simply checks to make sure the same number of records are returned so changing the content of the data here won't affect the test. + +#### Testing createComment() + +Let's add our first service test by making sure that `createComment()` actually stores a new comment in the database. When creating a comment we're not as worried about existing data in the database so let's create a new scenario which only contains a post—the post we'll be linking the new comment to through the comment's `postId` field. 
You can create multiple scenarios and then say which one you want pre-loaded into the database at the time the test is run. We'll let the `standard` scenario stay as-is and make a new one with a new set of data: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.scenarios.js" +export const standard = defineScenario({ + // ... +}) + +// highlight-start +export const postOnly = defineScenario({ + post: { + bark: { + data: { + title: 'Bark', + body: "A tree's bark is worse than its bite", + } + } + } +}) +// highlight-end +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.scenarios.ts" +import type { Prisma } from '@prisma/client' + +export const standard = defineScenario<Prisma.CommentCreateArgs>({ + // ... +}) + +// highlight-start +export const postOnly = defineScenario<Prisma.PostCreateArgs>({ + post: { + bark: { + data: { + title: 'Bark', + body: "A tree's bark is worse than its bite", + } + } + } +}) +// highlight-end + +export type StandardScenario = typeof standard +// highlight-next-line +export type PostOnlyScenario = typeof postOnly +``` + +</TabItem> +</Tabs> + +Now we can pass the `postOnly` scenario name as the first argument to a new `scenario()` test: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.test.js" +// highlight-next-line +import { comments, createComment } from './comments' + +describe('comments', () => { + scenario('returns all comments', async (scenario) => { + const result = await comments() + + expect(result.length).toEqual(Object.keys(scenario.comment).length) + }) + + // highlight-start + scenario('postOnly', 'creates a new comment', async (scenario) => { + const comment = await createComment({ + input: { + name: 'Billy Bob', + body: 'What is your favorite tree bark?', + post: { + connect: { id: scenario.post.bark.id }, + }, + }, + }) + + expect(comment.name).toEqual('Billy Bob') + expect(comment.body).toEqual('What is your favorite tree bark?') + expect(comment.postId).toEqual(scenario.post.bark.id) + expect(comment.createdAt).not.toEqual(null) + }) + // highlight-end +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.test.ts" +// highlight-next-line +import { comments, createComment } from './comments' + +// highlight-next-line +import type { StandardScenario, PostOnlyScenario } from './comments.scenarios' + +describe('comments', () => { + scenario('returns all comments', async (scenario: StandardScenario) => { + const result = await comments() + + expect(result.length).toEqual(Object.keys(scenario.comment).length) + }) + + // highlight-start + scenario( + 'postOnly', + 'creates a new comment', + async (scenario: PostOnlyScenario) => { + const comment = await createComment({ + input: { + name: 'Billy Bob', + body: 'What is your favorite tree bark?', + post: { + connect: { id: scenario.post.bark.id }, + }, + }, + }) + + expect(comment.name).toEqual('Billy Bob') + expect(comment.body).toEqual('What is your favorite tree bark?') + expect(comment.postId).toEqual(scenario.post.bark.id) + expect(comment.createdAt).not.toEqual(null) + } + ) + // highlight-end +}) +``` + +</TabItem> +</Tabs> + +We pass an optional first argument to `scenario()` which is the named scenario to use, instead of the default of "standard." 
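+
+Condensed, the two call shapes we've now used look like this (just a sketch; `scenario()` is made available globally by Redwood's test setup):
+
+```javascript
+// No scenario name: the default `standard` scenario is loaded before the test runs
+scenario('returns all comments', async (scenario) => {
+  /* ... */
+})
+
+// Scenario name first: the `postOnly` scenario is loaded instead
+scenario('postOnly', 'creates a new comment', async (scenario) => {
+  /* ... */
+})
+```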
+ +We were able to use the `id` of the post that we created in our scenario because the scenarios contain the actual database data after being inserted, not just the few fields we defined in the scenario itself. In addition to `id` we could access `createdAt` which is defaulted to `now()` in the database. + +:::info What's that `post: { connect: { id } }` nested structure? Can't we simply pass the Post's ID directly here? + +What you're looking at is the [connect syntax](https://www.prisma.io/docs/concepts/components/prisma-client/relation-queries#connect-an-existing-record), which is a Prisma +core concept. And yes, we could simply pass `postId: scenario.post.bark.id` instead – as a so-called "unchecked" input. But as the name implies, the connect syntax is king +in Prisma-land. + +<ShowForTs> +Note that if you try to use `postId` that would give you red squiggles, because that input would violate the `CreateCommentArgs` interface definition in +`api/src/services/comments/comments.ts`. In order to use the `postId` input, that'd need to be changed to + +```ts +interface CreateCommentArgs { + input: Prisma.CommentUncheckedCreateInput +} +``` + +or + +```ts +interface CreateCommentArgs { + input: Prisma.CommentCreateInput | Prisma.CommentUncheckedCreateInput +} +``` +in case we wanted to allow both ways – which Prisma generally allows, however [it doesn't allow to pick and mix](https://stackoverflow.com/a/69169106/1246547) within the same input. +</ShowForTs> + +::: + +We'll test that all the fields we give to the `createComment()` function are actually created in the database, and for good measure just make sure that `createdAt` is set to a non-null value. We could test that the actual timestamp is correct, but that involves freezing the JavaScript Date object so that no matter how long the test takes, you can still compare the value to `new Date` which is right *now*, down to the millisecond. While possible, it's beyond the scope of our easy, breezy tutorial since it gets [very gnarly](https://codewithhugo.com/mocking-the-current-date-in-jest-tests/)! + +:::info What's up with the names for scenario data? `posts.bark`? Really? + +This makes reasoning about your tests much nicer! Which of these would you rather work with: + +**"`claire` paid for an `ebook` using her `visa` credit card."** + +or: + +**"`user[3]` paid for `product[0]` using their `cards[2]` credit card?** + +If you said the second one, remember: you're not writing your code for the computer, you're writing it for other humans! It's the compiler's job to make code understandable to a computer, it's our job to make code understandable to our fellow developers. + +::: + +Okay, our comments service is feeling pretty solid now that we have our tests in place. The last step is add a form so that users can actually leave a comment on a blog post. + +:::info Mocks vs. Scenarios + +Mocks are used on the web site and scenarios are used on the api side. It might be helpful to remember that **mock** is a synonym for "fake", as in "this is fake data not really in the database" (so that we can create stories and tests in isolation without the api side getting involved). Whereas a **scenario** is real data in the database, it's just pre-set to some known state that we can rely on. + +Maybe a [mnemonic](https://www.mnemonicgenerator.com/?words=M%20W%20S%20A) would help? 
+ +**M**ocks : **W**eb :: **S**cenarios : **A**PI: + +* Mysterious Weasels Scratched Armor +* Minesweepers Wrecked Subliminal Attorneys +* Martian Warriors Squeezed Apricots + +Maybe not... + +::: diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter6/multiple-comments.md b/docs/versioned_docs/version-7.0/tutorial/chapter6/multiple-comments.md new file mode 100644 index 000000000000..0ac48bf9578f --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter6/multiple-comments.md @@ -0,0 +1,722 @@ +# Multiple Comments + +Our amazing blog posts will obviously garner a huge and passionate fanbase and we will very rarely have only a single comment. Let's work on displaying a list of comments. + +Let's think about where our comments are being displayed. Probably not on the homepage, since that only shows a summary of each post. A user would need to go to the full page to show the comments for that blog post. But that page is only fetching the data for the single blog post itself, nothing else. We'll need to get the comments and since we'll be fetching *and* displaying them, that sounds like a job for a Cell. + +:::info Couldn't the query for the blog post page also fetch the comments? + +Yes, it could! But the idea behind Cells is to make components even more [composable](https://en.wikipedia.org/wiki/Composability) by having them be responsible for their own data fetching *and* display. If we rely on a blog post to fetch the comments then the new Comments component we're about to create now requires something *else* to fetch the comments and pass them in. If we re-use the Comments component somewhere, now we're fetching comments in two different places. + +**But what about the Comment component we just made, why doesn't that fetch its own data?** + +There aren't any instances I (the author) could think of where we would ever want to display only a single comment in isolation—it would always be a list of all comments on a post. If displaying a single comment was common for your use case then it could definitely be converted to a **CommentCell** and have it responsible for pulling the data for that single comment itself. But keep in mind that if you have 50 comments on a blog post, that's now 50 GraphQL calls that need to go out, one for each comment. There's always a trade-off! + +**Then why make a standalone Comment component at all? Why not just do all the display in the CommentsCell?** + +We're trying to start in small chunks to make the tutorial more digestible for a new audience so we're starting simple and getting more complex as we go. But it also just feels *nice* to build up a UI from these smaller chunks that are easier to reason about and keep separate in your head. + +**But what about—** + +Look, we gotta end this sidebar and get back to building this thing. You can ask more questions later, promise! + +::: + +### Storybook + +Let's generate a **CommentsCell**: + +```bash +yarn rw g cell Comments +``` + +Storybook updates with a new **CommentsCell** under the **Cells** folder, and it's actually showing something: + +![image](https://user-images.githubusercontent.com/300/153477642-0d5a15a5-f96f-485a-b8b0-dbc1c4515279.png) + +Where did that come from? Check out `CommentsCell.mock.{js,ts}`: there's no Prisma model for a Comment yet, so Redwood took a guess that your model would at least contain an `id` field and just used that for the mock data. 
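+
+If you peek inside, the generated mock is tiny: a `standard` mock returning three objects with nothing but an `id`, roughly like this (the exact placeholder values the generator picks may differ):
+
+```javascript
+// web/src/components/CommentsCell/CommentsCell.mock.js (as generated, approximately)
+export const standard = () => ({
+  comments: [{ id: 42 }, { id: 43 }, { id: 44 }],
+})
+```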
+ +Let's update the `Success` component to use the `Comment` component created earlier, and add all of the fields we'll need for the **Comment** to render to the `QUERY`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentsCell/CommentsCell.jsx" +// highlight-next-line +import Comment from 'src/components/Comment' + +export const QUERY = gql` + query CommentsQuery { + comments { + id + // highlight-start + name + body + createdAt + // highlight-end + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ comments }) => { + return ( + // highlight-start + <> + {comments.map((comment) => ( + <Comment key={comment.id} comment={comment} /> + ))} + </> + // highlight-end + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/CommentsCell/CommentsCell.tsx" +// highlight-next-line +import Comment from 'src/components/Comment' + +import type { CommentsQuery } from 'types/graphql' +import type { CellSuccessProps, CellFailureProps } from '@redwoodjs/web' + +export const QUERY = gql` + query CommentsQuery { + comments { + id + // highlight-start + name + body + createdAt + // highlight-end + } + } +` + +export const Loading = () => <div>Loading...</div> + +export const Empty = () => <div>Empty</div> + +export const Failure = ({ error }: CellFailureProps) => ( + <div style={{ color: 'red' }}>Error: {error.message}</div> +) + +export const Success = ({ comments }: CellSuccessProps<CommentsQuery>) => { + return ( + // highlight-start + <> + {comments.map((comment) => ( + <Comment key={comment.id} comment={comment} /> + ))} + </> + // highlight-end + ) +} +``` + +</TabItem> +</Tabs> + +We're passing an additional `key` prop to make React happy when iterating over an array with `map`. + +If you check Storybook, you'll see that we do indeed render the `Comment` component three times, but there's no data to display. Let's update the mock with some sample data: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="web/src/components/CommentsCell/CommentsCell.mock.js" +export const standard = () => ({ + // highlight-start + comments: [ + { + id: 1, + name: 'Rob Cameron', + body: 'First comment', + createdAt: '2020-01-02T12:34:56Z', + }, + { + id: 2, + name: 'David Price', + body: 'Second comment', + createdAt: '2020-02-03T23:00:00Z', + }, + ], + // highlight-end +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="web/src/components/CommentsCell/CommentsCell.mock.ts" +export const standard = () => ({ + // highlight-start + comments: [ + { + id: 1, + name: 'Rob Cameron', + body: 'First comment', + createdAt: '2020-01-02T12:34:56Z', + }, + { + id: 2, + name: 'David Price', + body: 'Second comment', + createdAt: '2020-02-03T23:00:00Z', + }, + ], + // highlight-end +}) +``` + +</TabItem> +</Tabs> + +:::info What's this `standard` thing? + +Think of it as the standard, default mock if you don't do anything else. We would have loved to use the name "default" but that's already a reserved word in JavaScript! + +::: + +Storybook refreshes and we've got comments! 
It's a little hard to distinguish between the two separate comments because they're right next to each other: + +![image](https://user-images.githubusercontent.com/300/153478670-14c32c29-6d1d-491b-bc2b-b033557a6d84.png) + +Since `CommentsCell` is the one responsible for drawing multiple comments, it makes sense that it should be "in charge" of how they're displayed, including the gap between them. Let's add a style to do that in `CommentsCell`: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentsCell/CommentsCell.jsx" +export const Success = ({ comments }) => { + return ( + // highlight-next-line + <div className="space-y-8"> + {comments.map((comment) => ( + <Comment comment={comment} key={comment.id} /> + ))} + // highlight-next-line + </div> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/CommentsCell/CommentsCell.tsx" +export const Success = ({ comments }) => { + return ( + // highlight-next-line + <div className="space-y-8"> + {comments.map((comment) => ( + <Comment comment={comment} key={comment.id} /> + ))} + // highlight-next-line + </div> + ) +} +``` + +</TabItem> +</Tabs> + +:::tip + +`space-y-8` is a handy Tailwind class that puts a space *between* elements, but not above or below the entire stack (which is what would happen if you gave each `<Comment>` its own top/bottom margin). + +::: + +Looking good! Let's add our CommentsCell to the actual blog post display page: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +import { Link, routes } from '@redwoodjs/router' +// highlight-next-line +import CommentsCell from 'src/components/CommentsCell' + +const truncate = (text, length) => { + return text.substring(0, length) + '...' +} + +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + // highlight-next-line + {!summary && <CommentsCell />} + </article> + ) +} + +export default Article +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Article/Article.tsx" +import { Link, routes } from '@redwoodjs/router' +// highlight-next-line +import CommentsCell from 'src/components/CommentsCell' + +import type { Post } from 'types/graphql' + +const truncate = (text: string, length: number) => { + return text.substring(0, length) + '...' +} + +interface Props { + article: Omit<Post, 'createdAt'> + summary?: boolean +} + +const Article = ({ article, summary = false }: Props) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + // highlight-next-line + {!summary && <CommentsCell />} + </article> + ) +} + +export default Article +``` + +</TabItem> +</Tabs> + +If we are *not* showing the summary, then we'll show the comments. Take a look at the **Full** and **Summary** stories in Storybook and you should see comments on one and not on the other. + +:::info Shouldn't the `CommentsCell` cause an actual GraphQL request? 
How does this work? + +Redwood has added some functionality around Storybook so that if you're testing a component that itself isn't a Cell (like the `Article` component) but that renders a cell (like `CommentsCell`), then it will mock the GraphQL and use the `standard` mock that goes along with that Cell. Pretty cool, huh? + +::: + +Adding the comments to the article display has exposed another design issue: the comments are sitting right up underneath the article text: + +![image](https://user-images.githubusercontent.com/300/153480229-ea483d75-62bf-4b56-b248-10ca1597a7a8.png) + +Let's add a gap between the two: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.jsx" +const Article = ({ article, summary = false }) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + // highlight-start + {!summary && ( + <div className="mt-12"> + <CommentsCell /> + </div> + )} + // highlight-end + </article> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Article/Article.tsx" +const Article = ({ article, summary = false }: Props) => { + return ( + <article> + <header> + <h2 className="text-xl text-blue-700 font-semibold"> + <Link to={routes.article({ id: article.id })}>{article.title}</Link> + </h2> + </header> + <div className="mt-2 text-gray-900 font-light"> + {summary ? truncate(article.body, 100) : article.body} + </div> + // highlight-start + {!summary && ( + <div className="mt-12"> + <CommentsCell /> + </div> + )} + // highlight-end + </article> + ) +} +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/153480489-a59f27e3-6d70-4548-9a1e-4036b6860444.png) + +Okay, comment display is looking good! However, you may have noticed that if you tried going to the actual site there's an error where the comments should be: + +![image](https://user-images.githubusercontent.com/300/153480635-58ada8e8-ed5b-41b6-875b-501a07a36d9a.png) + +Why is that? Remember that we started with the `CommentsCell`, but never actually created a Comment model in `schema.prisma` or created an SDL and service! We'll be rectifying this soon. But this demonstrates another huge benefit of working with Storybook: you can build out UI functionality completely isolated from the api-side. In a team setting this is great because a web-side team can work on the UI while the api-side team can be building the backend end simultaneously and one doesn't have to wait for the other. + +### Testing + +We added a component, `CommentsCell`, and edited another, `Article`, so what do we test, and where? + +#### Testing Comments + +The actual `Comment` component does most of the work so there's no need to test all of that functionality again in `CommentsCell`: our `Comment` tests cover that just fine. What things does `CommentsCell` do that make it unique? 
+ +* Has a loading message +* Has an error message +* Has a failure message +* When it renders successfully, it outputs as many comments as were returned by the `QUERY` (*what* is rendered we'll leave to the `Comment` tests) + +The default `CommentsCell.test.{jsx,tsx}` actually tests every state for us, albeit at an absolute minimum level—it makes sure no errors are thrown: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentsCell/CommentsCell.test.jsx" +import { render } from '@redwoodjs/testing/web' + +import { Loading, Empty, Failure, Success } from './CommentsCell' +import { standard } from './CommentsCell.mock' + +describe('CommentsCell', () => { + it('renders Loading successfully', () => { + expect(() => { + render(<Loading />) + }).not.toThrow() + }) + + it('renders Empty successfully', async () => { + expect(() => { + render(<Empty />) + }).not.toThrow() + }) + + it('renders Failure successfully', async () => { + expect(() => { + render(<Failure error={new Error('Oh no')} />) + }).not.toThrow() + }) + + it('renders Success successfully', async () => { + expect(() => { + render(<Success comments={standard().comments} />) + }).not.toThrow() + }) +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/CommentsCell/CommentsCell.test.tsx" +import { render } from '@redwoodjs/testing/web' + +import { Loading, Empty, Failure, Success } from './CommentsCell' +import { standard } from './CommentsCell.mock' + +describe('CommentsCell', () => { + it('renders Loading successfully', () => { + expect(() => { + render(<Loading />) + }).not.toThrow() + }) + + it('renders Empty successfully', async () => { + expect(() => { + render(<Empty />) + }).not.toThrow() + }) + + it('renders Failure successfully', async () => { + expect(() => { + render(<Failure error={new Error('Oh no')} />) + }).not.toThrow() + }) + + it('renders Success successfully', async () => { + expect(() => { + render(<Success comments={standard().comments} />) + }).not.toThrow() + }) +}) +``` + +</TabItem> +</Tabs> + +And that's nothing to scoff at! As you've probably experienced, a React component usually either works 100% or blows up spectacularly. If it works, great! If it fails then the test fails too, which is exactly what we want to happen. + +But in this case we can do a little more to make sure `CommentsCell` is doing what we expect. Let's update the `Success` test in `CommentsCell.test.{js,ts}` to check that exactly the number of comments we passed in as a prop are rendered. How do we know a comment was rendered? 
How about if we check that each `comment.body` (the most important part of the comment) is present on the screen: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentsCell/CommentsCell.test.jsx" +// highlight-next-line +import { render, screen } from '@redwoodjs/testing/web' + +import { Loading, Empty, Failure, Success } from './CommentsCell' +import { standard } from './CommentsCell.mock' + +describe('CommentsCell', () => { + it('renders Loading successfully', () => { + expect(() => { + render(<Loading />) + }).not.toThrow() + }) + + it('renders Empty successfully', async () => { + expect(() => { + render(<Empty />) + }).not.toThrow() + }) + + it('renders Failure successfully', async () => { + expect(() => { + render(<Failure error={new Error('Oh no')} />) + }).not.toThrow() + }) + + it('renders Success successfully', async () => { + // highlight-start + const comments = standard().comments + render(<Success comments={comments} />) + + comments.forEach((comment) => { + expect(screen.getByText(comment.body)).toBeInTheDocument() + }) + // highlight-end + }) +}) + +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/CommentsCell/CommentsCell.test.tsx" +// highlight-next-line +import { render, screen } from '@redwoodjs/testing/web' + +import { Loading, Empty, Failure, Success } from './CommentsCell' +import { standard } from './CommentsCell.mock' + +describe('CommentsCell', () => { + it('renders Loading successfully', () => { + expect(() => { + render(<Loading />) + }).not.toThrow() + }) + + it('renders Empty successfully', async () => { + expect(() => { + render(<Empty />) + }).not.toThrow() + }) + + it('renders Failure successfully', async () => { + expect(() => { + render(<Failure error={new Error('Oh no')} />) + }).not.toThrow() + }) + + it('renders Success successfully', async () => { + // highlight-start + const comments = standard().comments + render(<Success comments={comments} />) + + comments.forEach((comment) => { + expect(screen.getByText(comment.body)).toBeInTheDocument() + }) + // highlight-end + }) +}) + +``` + +</TabItem> +</Tabs> + +We're looping through each `comment` from the mock, the same mock used by Storybook, so that even if we add more later, we're covered. You may find yourself writing a test and saying "just test that there are two total comments," which will work today, but months from now when you add more comments to the mock to try some different iterations in Storybook, that test will start failing. Avoid hardcoding data like this, especially [magic numbers](https://en.wikipedia.org/wiki/Magic_number_(programming)), into your test when you can derive it from your mocked data! + +#### Testing Article + +The functionality we added to `Article` says to show the comments for the post if we are *not* showing the summary. We've got a test for both the "full" and "summary" renders already. Generally you want a test to be testing "one thing," like whether the body of the article is present, and another test for whether the comments are displaying. If you find that you're using "and" in your test description (like "renders a blog post and its comments") that's a good sign that it should probably be split into two separate tests. 
+ +Let's add two additional tests for our new functionality: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Article/Article.test.jsx" +// highlight-start +import { render, screen, waitFor } from '@redwoodjs/testing' + +import { standard } from 'src/components/CommentsCell/CommentsCell.mock' +// highlight-end + +import Article from './Article' + +const ARTICLE = { + id: 1, + title: 'First post', + body: `Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Street art next level umami squid. Hammock hexagon glossier 8-bit banjo. Neutra la croix mixtape echo park four loko semiotics kitsch forage chambray. Semiotics salvia selfies jianbing hella shaman. Letterpress helvetica vaporware cronut, shaman butcher YOLO poke fixie hoodie gentrify woke heirloom.`, + createdAt: new Date().toISOString(), +} + +describe('Article', () => { + it('renders a blog post', () => { + render(<Article article={ARTICLE} />) + + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect(screen.getByText(ARTICLE.body)).toBeInTheDocument() + }) + + // highlight-start + it('renders comments when displaying a full blog post', async () => { + const comment = standard().comments[0] + render(<Article article={ARTICLE} />) + + await waitFor(() => + expect(screen.getByText(comment.body)).toBeInTheDocument() + ) + }) + // highlight-end + + it('renders a summary of a blog post', () => { + render(<Article article={ARTICLE} summary={true} />) + + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect( + screen.getByText( + 'Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Str...' + ) + ).toBeInTheDocument() + }) + + // highlight-start + it('does not render comments when displaying a summary', async () => { + const comment = standard().comments[0] + render(<Article article={ARTICLE} summary={true} />) + + await waitFor(() => + expect(screen.queryByText(comment.body)).not.toBeInTheDocument() + ) + }) + // highlight-end +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Article/Article.test.tsx" +// highlight-start +import { render, screen, waitFor } from '@redwoodjs/testing' + +import { standard } from 'src/components/CommentsCell/CommentsCell.mock' +// highlight-end + +import Article from './Article' + +const ARTICLE = { + id: 1, + title: 'First post', + body: `Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Street art next level umami squid. Hammock hexagon glossier 8-bit banjo. Neutra la croix mixtape echo park four loko semiotics kitsch forage chambray. Semiotics salvia selfies jianbing hella shaman. 
Letterpress helvetica vaporware cronut, shaman butcher YOLO poke fixie hoodie gentrify woke heirloom.`, + createdAt: new Date().toISOString(), +} + +describe('Article', () => { + it('renders a blog post', () => { + render(<Article article={ARTICLE} />) + + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect(screen.getByText(ARTICLE.body)).toBeInTheDocument() + }) + + // highlight-start + it('renders comments when displaying a full blog post', async () => { + const comment = standard().comments[0] + render(<Article article={ARTICLE} />) + + await waitFor(() => + expect(screen.getByText(comment.body)).toBeInTheDocument() + ) + }) + // highlight-end + + it('renders a summary of a blog post', () => { + render(<Article article={ARTICLE} summary={true} />) + + expect(screen.getByText(ARTICLE.title)).toBeInTheDocument() + expect( + screen.getByText( + 'Neutra tacos hot chicken prism raw denim, put a bird on it enamel pin post-ironic vape cred DIY. Str...' + ) + ).toBeInTheDocument() + }) + + // highlight-start + it('does not render comments when displaying a summary', async () => { + const comment = standard().comments[0] + render(<Article article={ARTICLE} summary={true} />) + + await waitFor(() => + expect(screen.queryByText(comment.body)).not.toBeInTheDocument() + ) + }) + // highlight-end +}) +``` + +</TabItem> +</Tabs> + +Notice we're importing the mock from a completely different component—nothing wrong with that! + +We're introducing a new test function here, `waitFor()`, which will wait for things like GraphQL queries to finish running before checking for what's been rendered. Since `Article` renders `CommentsCell` we need to wait for the `Success` component of `CommentsCell` to be rendered. + +:::info + +The summary version of `Article` does *not* render the `CommentsCell`, but we should still wait. Why? If we did mistakenly start including `CommentsCell`, but didn't wait for the render, we would get a falsely passing test—indeed the text isn't on the page but that's because it's still showing the `Loading` component! If we had waited we would have seen the actual comment body get rendered, and the test would (correctly) fail. + +::: + +Okay we're finally ready to let users create their comments. diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter6/the-redwood-way.md b/docs/versioned_docs/version-7.0/tutorial/chapter6/the-redwood-way.md new file mode 100644 index 000000000000..316154c266d1 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter6/the-redwood-way.md @@ -0,0 +1,316 @@ +# Building a Component the Redwood Way + +What's our blog missing? Comments. Let's add a simple comment engine so people can leave +their completely rational, well-reasoned comments on our blog posts. It's the internet, +what could go wrong? + +There are two main features we need to build: + +1. Comment form and creation +2. Comment retrieval and display + +Which order we build them in is up to us. To ease into things, let's start with the fetching and displaying comments first and then we'll move on to more complex work of adding a form and service to create a new comment. Of course, this is Redwood, so even forms and services aren't *that* complex! + +### Storybook + +Let's create a component for the display of a single comment. 
First up, the generator: + +```bash +yarn rw g component Comment +``` + +Storybook should refresh and our "Generated" Comment story will be ready to go: + +![image](https://user-images.githubusercontent.com/300/153475744-2e3151f9-b39c-4823-b2ef-539513cd4005.png) + +Let's think about what we want to ask users for and then display in a comment. How about just their name and the content of the comment itself? And we'll throw in the date/time it was created. Let's update the **Comment** component to accept a `comment` object with those three properties: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.jsx" +// highlight-next-line +const Comment = ({ comment }) => { + return ( + <div> + // highlight-start + <h2>{comment.name}</h2> + <time dateTime={comment.createdAt}>{comment.createdAt}</time> + <p>{comment.body}</p> + // highlight-end + </div> + ) +} + +export default Comment +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.tsx" +// highlight-start +// Just a temporary type. We'll replace this later +interface Props { + comment: { + name: string + createdAt: string + body: string + } +} +// highlight-end + +// highlight-next-line +const Comment = ({ comment }: Props) => { + return ( + <div> + // highlight-start + <h2>{comment.name}</h2> + <time dateTime={comment.createdAt}>{comment.createdAt}</time> + <p>{comment.body}</p> + // highlight-end + </div> + ) +} + +export default Comment +``` + +</TabItem> +</Tabs> + +Once you save that file and Storybook reloads you'll see it blow up: + +![image](https://user-images.githubusercontent.com/300/153475904-8f53cb09-3798-4e5a-9b6a-1ff1df98f93f.png) + +We need to update the story to include that comment object and pass it as a prop: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.stories.jsx" +import Comment from './Comment' + +export const generated = () => { + // highlight-start + return ( + <Comment + comment={{ + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z' + }} + /> + ) + // highlight-end +} + +export default { + title: 'Components/Comment', + component: Comment, +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.stories.tsx" +import Comment from './Comment' + +export const generated = () => { + // highlight-start + return ( + <Comment + comment={{ + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z' + }} + /> + ) + // highlight-end +} + +export default { + title: 'Components/Comment', + component: Comment, +} +``` + +</TabItem> +</Tabs> + +:::info + +Datetimes will come from GraphQL in [ISO8601 format](https://en.wikipedia.org/wiki/ISO_8601#Times) so we need to return one in that format here. 
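+
+If you ever need to produce one of these yourself (in a story or a mock, say), JavaScript's built-in `toISOString()` returns exactly this format:
+
+```javascript
+// Returns an ISO 8601 string in UTC, e.g. "2020-01-01T12:34:56.000Z"
+new Date('2020-01-01T12:34:56Z').toISOString()
+```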
+ +::: + +Storybook will reload and be much happier: + +![image](https://user-images.githubusercontent.com/300/153476049-8ac31858-3014-47b5-807c-02b32d5a3ab0.png) + +Let's add a little bit of styling and date conversion to get this **Comment** component looking like a nice, completed design element: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.jsx" +// highlight-start +const formattedDate = (datetime) => { + const parsedDate = new Date(datetime) + const month = parsedDate.toLocaleString('default', { month: 'long' }) + return `${parsedDate.getDate()} ${month} ${parsedDate.getFullYear()}` +} +// highlight-end + +const Comment = ({ comment }) => { + return ( + // highlight-start + <div className="bg-gray-200 p-8 rounded-lg"> + <header className="flex justify-between"> + <h2 className="font-semibold text-gray-700">{comment.name}</h2> + <time className="text-xs text-gray-500" dateTime={comment.createdAt}> + {formattedDate(comment.createdAt)} + </time> + </header> + <p className="text-sm mt-2">{comment.body}</p> + </div> + // highlight-end + ) +} + +export default Comment +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.tsx" +// highlight-start +const formattedDate = (datetime: ConstructorParameters<typeof Date>[0]) => { + const parsedDate = new Date(datetime) + const month = parsedDate.toLocaleString('default', { month: 'long' }) + return `${parsedDate.getDate()} ${month} ${parsedDate.getFullYear()}` +} +// highlight-end + +// Just a temporary type. We'll replace this later +interface Props { + comment: { + name: string + createdAt: string + body: string + } +} + +const Comment = ({ comment }: Props) => { + return ( + // highlight-start + <div className="bg-gray-200 p-8 rounded-lg"> + <header className="flex justify-between"> + <h2 className="font-semibold text-gray-700">{comment.name}</h2> + <time className="text-xs text-gray-500" dateTime={comment.createdAt}> + {formattedDate(comment.createdAt)} + </time> + </header> + <p className="text-sm mt-2">{comment.body}</p> + </div> + // highlight-end + ) +} + +export default Comment +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/153476305-017c6cf8-a2dd-4da0-a6ef-487d91a562df.png) + +Our component looks great! Now let's verify that it does what we want it to do with a test. + +### Testing + +We don't want Santa to skip our house so let's test our **Comment** component. We could test that the author's name and the body of the comment appear, as well as the date it was posted. + +The default test that comes with a generated component just makes sure that no errors are thrown, which is the least we could ask of our components! 
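+
+For reference, the generated test is roughly this shape (a sketch; the exact file Redwood generates may differ slightly between versions):
+
+```jsx title="web/src/components/Comment.test.jsx"
+import { render } from '@redwoodjs/testing'
+
+import Comment from './Comment'
+
+describe('Comment', () => {
+  it('renders successfully', () => {
+    expect(() => {
+      // Renders with no props: fine for the freshly generated component, but our
+      // Comment now reads from a `comment` prop, so we'll beef this test up next.
+      render(<Comment />)
+    }).not.toThrow()
+  })
+})
+```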
+ +Let's add a sample comment to the test and check that the various parts are being rendered: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment.test.jsx" +// highlight-next-line +import { render, screen } from '@redwoodjs/testing' + +import Comment from './Comment' + +describe('Comment', () => { + it('renders successfully', () => { + // highlight-start + const comment = { + name: 'John Doe', + body: 'This is my comment', + createdAt: '2020-01-02T12:34:56Z', + } + render(<Comment comment={comment} />) + + expect(screen.getByText(comment.name)).toBeInTheDocument() + expect(screen.getByText(comment.body)).toBeInTheDocument() + const dateExpect = screen.getByText('2 January 2020') + expect(dateExpect).toBeInTheDocument() + expect(dateExpect.nodeName).toEqual('TIME') + expect(dateExpect).toHaveAttribute('datetime', comment.createdAt) + // highlight-end + }) +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment.test.tsx" +// highlight-next-line +import { render, screen } from '@redwoodjs/testing' + +import Comment from './Comment' + +describe('Comment', () => { + it('renders successfully', () => { + // highlight-start + const comment = { + name: 'John Doe', + body: 'This is my comment', + createdAt: '2020-01-02T12:34:56Z', + } + render(<Comment comment={comment} />) + + expect(screen.getByText(comment.name)).toBeInTheDocument() + expect(screen.getByText(comment.body)).toBeInTheDocument() + const dateExpect = screen.getByText('2 January 2020') + expect(dateExpect).toBeInTheDocument() + expect(dateExpect.nodeName).toEqual('TIME') + expect(dateExpect).toHaveAttribute('datetime', comment.createdAt) + // highlight-end + }) +}) +``` + +</TabItem> +</Tabs> + +Here we're testing for both elements of the output `createdAt` timestamp: the actual text that's output (similar to how we tested for an article's truncated body) but also that the element that wraps that text is a `<time>` tag and that it contains a `datetime` attribute with the raw value of `comment.createdAt`. This might seem like overkill but the point of the `datetime` attribute is to provide a machine-readable timestamp that the browser could (theoretically) hook into and do stuff with. This makes sure that we preserve that ability. + +If your tests aren't already running in another terminal window, you can start them now: + +```bash +yarn rw test +``` + +:::info What happens if we change the formatted output of the timestamp? Wouldn't we have to change the test? + +Yes, just like we'd have to change the truncation text if we changed the length of the truncation. One alternative approach to testing for the formatted output could be to move the date formatting formula into a function that you can export from the `Comment` component. Then you can import that in your test and use it to check the formatted output. Now if you change the formula the test keeps passing because it's sharing the function with `Comment`. 
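+
+Here's a rough sketch of what that could look like (illustrative only; the names and file layout are just one way to do it):
+
+```jsx
+// In Comment.jsx: export the formatter alongside the component
+export const formattedDate = (datetime) => {
+  const parsedDate = new Date(datetime)
+  const month = parsedDate.toLocaleString('default', { month: 'long' })
+  return `${parsedDate.getDate()} ${month} ${parsedDate.getFullYear()}`
+}
+
+// In Comment.test.jsx: import it and derive the expected text from the same code
+// import Comment, { formattedDate } from './Comment'
+//
+// expect(screen.getByText(formattedDate(comment.createdAt))).toBeInTheDocument()
+```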
+ +::: diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter7/api-side-currentuser.md b/docs/versioned_docs/version-7.0/tutorial/chapter7/api-side-currentuser.md new file mode 100644 index 000000000000..bce5ffaa2efd --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/chapter7/api-side-currentuser.md @@ -0,0 +1,781 @@ +# Accessing currentUser in the API side + +As our blog has evolved into a multi-million dollar enterprise, we find ourselves so busy counting our money that we no longer have the time to write actual blog posts! Let's hire some authors to write them for us. + +What do we need to change to allow multiple users to create posts? Well, for one, we'll need to start associating a blog post to the author that wrote it so we can give them credit. We'll also want to display the name of the author when reading an article. Finally, we'll want to limit the list of blog posts that an author has access to edit to only their own: Alice shouldn't be able to make changes to Bob's articles. + +## Associating a Post to a User + +Let's introduce a relationship between a `Post` and a `User`, AKA a foreign key. This is considered a one-to-many relationship (one `User` has many `Post`s), similar to the relationship we created earlier between a `Post` and its associated `Comment`s. Here's what our new schema will look like: + +``` +┌─────────────────────┐ ┌───────────┐ +│ User │ │ Post │ +├─────────────────────┤ ├───────────┤ +│ id │───┐ │ id │ +│ name │ │ │ title │ +│ email │ │ │ body │ +│ hashedPassword │ └──<│ userId │ +│ ... │ │ createdAt │ +└─────────────────────┘ └───────────┘ +``` + +Making data changes like this will start becoming second nature soon: + +1. Add the new relationship the `schema.prisma` file +2. Migrate the database +3. Generate/update SDLs and Services + +### Add the New Relationship to the Schema + +First we'll add the new `userId` field to `Post` and the relation to `User`: + +```javascript title="api/db/schema.prisma" +model Post { + id Int @id @default(autoincrement()) + title String + body String + comments Comment[] + // highlight-start + user User @relation(fields: [userId], references: [id]) + userId Int + // highlight-end + createdAt DateTime @default(now()) +} + +model User { + id Int @id @default(autoincrement()) + name String? + email String @unique + hashedPassword String + salt String + resetToken String? + resetTokenExpiresAt DateTime? + roles String @default("moderator") + // highlight-next-line + posts Post[] +} +``` + +:::info User SDL +We created a User model in Chapter 4 when we set up authentication for our blog. Redwood's `setup auth dbAuth` command generated two files for us that manage authentication: the `auth` file in `api/src/lib/`, and the `auth` file in `api/src/functions/`. Both of these files use our PrismaClient directly to work with the User model, so we didn't need to set up an SDL or services for the User model. + +If you followed our recommendation in the Intermission to use the Example repo, the User SDL and service is already added for you. If not, you'll need to add it yourself: + +```bash +yarn rw g sdl User --no-crud +``` + +We'll comment out the sensitive fields of our GraphQL User type so there's no chance of them leaking: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```js title="api/src/graphql/users.sdl.js" + type User { + ... + # hashedPassword: String! + # salt: String! 
+    # resetToken: String
+    # resetTokenExpiresAt: DateTime
+  }
+```
+
+</TabItem>
+<TabItem value="ts" label="TypeScript">
+
+```ts title="api/src/graphql/users.sdl.ts"
+  type User {
+    ...
+    # hashedPassword: String!
+    # salt: String!
+    # resetToken: String
+    # resetTokenExpiresAt: DateTime
+  }
+```
+
+</TabItem>
+</Tabs>
+:::
+
+### Migrate the Database
+
+Next, migrate the database to apply the changes (when given the option, name the migration something like "add userId to post"):
+
+```
+yarn rw prisma migrate dev
+```
+
+Whoops!
+
+<img width="584" alt="image" src="https://user-images.githubusercontent.com/300/192899337-9cc1167b-e6da-42d4-83dc-d2a6c0cd1179.png" />
+
+Similar to what happened when we added `roles` to `User`, we made `userId` a required field, but we already have several posts in our development database. Since we don't have a default value for `userId` defined, it's impossible to add this column to the database.
+
+:::warning Why don't we just set `@default(1)` in the schema?
+
+This would get us past this problem, but could cause hard-to-track-down bugs in the future: if you ever forget to assign a `post` to a `user`, rather than fail it'll happily just set `userId` to `1`, which may or may not even exist some day! It's best to take the extra time to do things The Right Way and avoid the quick hacks to get past an annoyance like this. Your future self will thank you!
+
+:::
+
+Since we're in development, let's just blow away the database and start over:
+
+```
+yarn rw prisma migrate reset
+```
+
+:::info Database Seeds
+
+If you started the second half of the tutorial from the [Redwood Tutorial repo](https://github.com/redwoodjs/redwood-tutorial) you'll get an error after resetting the database—Prisma attempts to seed the database with a user and some posts to get you started, but the posts in that seed do not have the new required `userId` field! Open up `scripts/seed.js` and add `userId: 1` to each post:
+
+```javascript title="scripts/seed.js"
+{
+  id: 1,
+  name: 'John Doe',
+  title: 'Welcome to the blog!',
+  body:
+    "I'm baby single- origin coffee kickstarter lo - fi paleo skateboard.Tumblr hashtag austin whatever DIY plaid knausgaard fanny pack messenger bag blog next level woke.Ethical bitters fixie freegan,helvetica pitchfork 90's tbh chillwave mustache godard subway tile ramps art party. Hammock sustainable twee yr bushwick disrupt unicorn, before they sold out direct trade chicharrones etsy polaroid hoodie. Gentrify offal hoodie fingerstache.",
+  // highlight-next-line
+  userId: 1,
+},
+```
+
+Now run `yarn rw prisma migrate reset` and...you'll get a different error. But that's okay, read on...
+
+:::
+
+We've got an error here because running a database `reset` doesn't also apply pending migrations. So we're trying to set a `userId` where one doesn't exist in the database (it does exist in the Prisma-generated client libs though, so it thinks that there *should* be one, even if it doesn't exist in the database yet).
+
+It may feel like we're stuck, but note that the database did reset successfully, it's just the seed that failed.
So now let's migrate the database to add the new `userId` to `Post`, and then re-run the seed to populate the database, naming it something like "add userId to post": + +``` +yarn rw prisma migrate dev +``` + +And then the seed: + +``` +yarn rw prisma db seed +``` + +:::info + +If you didn't start your codebase from the [Redwood Tutorial repo](https://github.com/redwoodjs/redwood-tutorial) then you'll now have no users or posts in the database. Go ahead and create a user by going to [http://localhost:8910/signup](http://localhost:8910/signup) but don't create any posts yet! Change the user's role to be "admin", either by using the console introduced in the [previous page](/docs/canary/tutorial/chapter7/rbac#changing-roles-on-a-user) or by [opening Prisma Studio](/docs/canary/tutorial/chapter2/getting-dynamic#prisma-studio) and changing it directly in the database. + +::: + +### Add Fields to the SDL and Service + +Let's think about where we want to show our new relationship. For now, probably just on the homepage and article page: we'll display the author of the post next to the title. That means we'll want to access the user from the post in a GraphQL query something like this: + +```graphql +post { + id + title + body + createdAt + user { + name + } +} +``` + +To enable this we'll need to make two modifications on the api side: + +1. Add the `user` field to the `posts` SDL +2. Add a **relation resolver** for the `user` in the `posts` service + +#### Add User to Posts SDL + +```javascript title="api/src/graphql/posts.sdl.js" + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + // highlight-next-line + user: User! + } +``` + +:::info What about the mutations? + +We did *not* add `user` or `userId` to the `CreatePostInput` or `UpdatePostInput` types. Although we want to set a user on each newly created post, we don't want just anyone to do that via a GraphQL call! You could easily create or edit a post and assign it to someone else by just modifying the GraphQL payload. We'll save assigning the user to just the service, so it can't be manipulated by the outside world. + +::: + +Here we're using `User!` with an exclamation point because we know that every `Post` will have an associated user to it—this field will never be `null`. + +#### Add User Relation Resolver + +This one is a little tricker: we need to add a "lookup" in the `posts` service, so that it knows how to get the associated user. When we generated the `comments` SDL and service we got this **relation resolver** created for us. We could re-run the service generator for `Post` but that could blow away changes we made to this file. Our only option would be to include the `--force` flag since the file already exists, which will write over everything. 
In this case we'll just add the resolver manually: + +```javascript title="api/src/services/posts/posts.js" +import { db } from 'src/lib/db' + +export const posts = () => { + return db.post.findMany() +} + +export const post = ({ id }) => { + return db.post.findUnique({ + where: { id }, + }) +} + +export const createPost = ({ input }) => { + return db.post.create({ + data: input, + }) +} + +export const updatePost = ({ id, input }) => { + return db.post.update({ + data: input, + where: { id }, + }) +} + +export const deletePost = ({ id }) => { + return db.post.delete({ + where: { id }, + }) +} + +// highlight-start +export const Post = { + user: (_obj, { root }) => + db.post.findFirst({ where: { id: root.id } }).user(), +} +// highlight-end +``` + +This can be non-intuitive so let's step through it. First, declare a variable with the same name as the model this service is for: `Post` for the `posts` service. Now, set that to an object containing keys that are the same as the fields that are going to be looked up, in this case `user`. When GraphQL invokes this function it passes a couple of arguments, one of which is `root` which is the object that was resolved to start with, in this case the `post` in our GraphQL query: + +```graphql +post { <- root + id + title + body + createdAt + user { + name + } +} +``` + +That post will already be retrieved from the database, and so we know its `id`. `root` is that object, so can simply call `.id` on it to get that property. Now we know everything we need to to make a `findFirst()` query in Prisma, giving it the `id` of the record we already found, but returning the `user` associated to that record, rather than the `post` itself. + +We could also write this resolver as follows: + +```javascript +export const Post = { + user: (_obj, { root }) => + db.user.findFirst({ where: { id: root.userId } }), +} +``` + +Note that if you keep the relation resolver above, but also included a `user` property in the post(s) returned from `posts` and `post`, this field resolver will still be invoked and whatever is returned will override any `user` property that exists already. Why? That's just how GraphQL works—resolvers, if they are present for a named field, will always be invoked and their return value used, even if the `root` already contains that data. + +:::info Prisma and the N+1 Problem + +If you have any experience with database design and retrieval you may have noticed this method presents a less than ideal solution: for every post that's found, you need to perform an *additional* query just to get the user data associated with that `post`, also known as the [N+1 problem](https://medium.com/the-marcy-lab-school/what-is-the-n-1-problem-in-graphql-dd4921cb3c1a). This is just due to the nature of GraphQL queries: each resolver function really only knows about its own parent object, nothing about potential children. + +There have been several attempts to work around this issue. A simple one that includes no extra dependencies is to remove this field resolver and simply include `user` data along with any `post` you retrieve from the database: + +```javascript +export const post = ({ id }) => { + return db.post.findUnique({ + where: { id }, + include: { + user: true + } + }) +} +``` + +This may or may not work for you: you are incurring the overhead of always returning user data, even if that data wasn't requested in the GraphQL query. 
+In addition, this breaks further nesting of queries: what if you wanted to return the user for this post, and a list of the IDs of all the other posts they created?
+
+```graphql
+post {
+  id
+  title
+  body
+  createdAt
+  user {
+    name
+    posts {
+      id
+    }
+  }
+}
+```
+
+This query would now fail because you only have `post.user` available, not `post.user.posts`.
+
+The Redwood team is actively looking into more elegant solutions to the N+1 problem, so stay tuned!
+
+:::
+
+## Displaying the Author
+
+In order to get the author info we'll need to update our Cell queries to pull the user's name.
+
+There are two places where we publicly present a post:
+
+1. The homepage
+2. A single article page
+
+Let's update their respective Cells to include the name of the user that created the post:
+
+```jsx title="web/src/components/ArticlesCell/ArticlesCell.js"
+export const QUERY = gql`
+  query ArticlesQuery {
+    articles: posts {
+      id
+      title
+      body
+      createdAt
+      // highlight-start
+      user {
+        name
+      }
+      // highlight-end
+    }
+  }
+`
+```
+
+```jsx title="web/src/components/ArticleCell/ArticleCell.js"
+export const QUERY = gql`
+  query ArticleQuery($id: Int!) {
+    article: post(id: $id) {
+      id
+      title
+      body
+      createdAt
+      // highlight-start
+      user {
+        name
+      }
+      // highlight-end
+    }
+  }
+`
+```
+
+And then update the display component that shows an Article:
+
+```jsx title="web/src/components/Article/Article.js"
+import { Link, routes } from '@redwoodjs/router'
+
+const Article = ({ article }) => {
+  return (
+    <article>
+      <header>
+        <h2 className="text-xl text-blue-700 font-semibold">
+          <Link to={routes.article({ id: article.id })}>{article.title}</Link>
+          // highlight-start
+          <span className="ml-2 text-gray-400 font-normal">
+            by {article.user.name}
+          </span>
+          // highlight-end
+        </h2>
+      </header>
+
+      <div className="mt-2 text-gray-900 font-light">{article.body}</div>
+    </article>
+  )
+}
+
+export default Article
+```
+
+Depending on whether you started from the Redwood Tutorial repo or not, you may not have any posts to actually display. Let's add some! However, before we can do that with our posts admin/scaffold, we'll need to actually associate a user with the post they created. Remember that we don't allow setting the `userId` via GraphQL, which is what the scaffolds use when creating/editing records. But that's okay, we want this to only happen in the service anyway, which is where we're heading now.
+
+## Accessing `currentUser` on the API side
+
+There's a magical variable named `context` that's available within any of your service functions. It contains the context in which the service function is being called. One property available on this context is the user that's logged in (*if* someone is logged in). It's the same `currentUser` that is available on the web side:
+
+```javascript title="api/src/services/posts/posts.js"
+export const createPost = ({ input }) => {
+  return db.post.create({
+    // highlight-next-line
+    data: { ...input, userId: context.currentUser.id }
+  })
+}
+```
+
+So `context.currentUser` will always be around if you need access to the user that made this request. We'll take their user `id` and append it to the rest of the incoming data from the scaffold form when creating a new post. Let's try it out!
+
+You should be able to create a post via the admin now:
+
+<img width="937" alt="image" src="https://user-images.githubusercontent.com/300/193152401-d98b488e-dd71-475a-a78c-6cd5233e5bee.png" />
+
+And going back to the homepage should actually start showing posts and their authors!
+
+<img width="937" alt="image" src="https://user-images.githubusercontent.com/300/193152524-2715e49d-a1c3-43a1-b968-84a4f8ae3846.png" />
+
+## Only Show a User Their Posts in Admin
+
+Right now any admin that visits `/admin/posts` can still see all posts, not only their own. Let's change that.
+
+Since we know we have access to `context.currentUser` we can sprinkle it throughout our posts service to limit what's returned to only those posts that the currently logged in user owns:
+
+```javascript title="api/src/services/posts/posts.js"
+import { db } from 'src/lib/db'
+
+export const posts = () => {
+  // highlight-next-line
+  return db.post.findMany({ where: { userId: context.currentUser.id } })
+}
+
+export const post = ({ id }) => {
+  // highlight-start
+  return db.post.findFirst({
+    where: { id, userId: context.currentUser.id },
+  })
+  // highlight-end
+}
+
+export const createPost = ({ input }) => {
+  return db.post.create({
+    data: { ...input, userId: context.currentUser.id },
+  })
+}
+
+export const updatePost = ({ id, input }) => {
+  return db.post.update({
+    data: input,
+    where: { id },
+  })
+}
+
+export const deletePost = ({ id }) => {
+  return db.post.delete({
+    where: { id },
+  })
+}
+
+export const Post = {
+  user: (_obj, { root }) =>
+    db.post.findFirst({ where: { id: root.id } }).user(),
+}
+```
+
+:::info Prisma's `findUnique()` vs. `findFirst()`
+
+Note that we switched from `findUnique()` to `findFirst()` here. Prisma's `findUnique()` requires that any attributes in the `where` clause have unique indexes, which `id` does, but `userId` does not. So we need to switch to the `findFirst()` function, which allows you to put whatever you want in the `where`. That may match more than one record, but Prisma will only return the first of that set. In this case we know there'll only ever be one, because we're selecting by `id` *in addition* to `userId`.
+
+:::
+
+These changes make sure that a user can only see a list of their own posts, or the detail for a single post that they own.
+
+What about `updatePost` and `deletePost`? They aren't limited to just the `currentUser`, which would let anyone update or delete a post if they made a manual GraphQL call! That's not good. We'll deal with those [a little later](#update-and-delete).
+
+But there's a problem with the updates we just made: doesn't the homepage also use the `posts` service to display all of its articles? This code update would limit the homepage to only showing a logged in user's own posts and no one else! And what happens if someone who is *not* logged in goes to the homepage? ERROR.
+
+How can we return one list of posts in the admin, and a different list of posts for the homepage?
+
+## An AdminPosts Service
+
+We could go down the road of adding variables to the GraphQL queries, along with checks in the existing `posts` service, that return a different list of posts depending on whether you're on the homepage or in the admin. But this complexity adds a lot of surface area to test and some fragility if someone goes in there in the future—they have to be very careful not to add a new condition or negate an existing one and accidentally expose your admin functionality to exploits.
+
+What if we created *new* GraphQL queries for the admin views of posts? They would have automatic security checks thanks to `@requireAuth`, no custom code required. These new queries will be used in the admin posts pages, and the original, simple `posts` service will be used for the homepage and article detail page.
+ +There are several steps we'll need to complete: + +1. Create a new `adminPosts` SDL that defines the types +2. Create a new `adminPosts` service +3. Update the posts admin GraphQL queries to pull from `adminPosts` instead of `posts` + +### Create the `adminPosts` SDL + +Let's keep the existing `posts.sdl.js` and make that the "public" interface. Duplicate that SDL, naming it `adminPosts.sdl.js`, and modify it like so: + +```javascript title="api/src/graphql/adminPosts.sdl.js" +export const schema = gql` + type Query { + adminPosts: [Post!]! @requireAuth(roles: ["admin"]) + adminPost(id: Int!): Post @requireAuth(roles: ["admin"]) + } + + input CreatePostInput { + title: String! + body: String! + } + + input UpdatePostInput { + title: String + body: String + } + + type Mutation { + createPost(input: CreatePostInput!): Post! @requireAuth(roles: ["admin"]) + updatePost(id: Int!, input: UpdatePostInput!): Post! @requireAuth(roles: ["admin"]) + deletePost(id: Int!): Post! @requireAuth(roles: ["admin"]) + } +` +``` + +```javascript title="api/src/graphql/posts.sdl.js" +export const schema = gql` + type Post { + id: Int! + title: String! + body: String! + createdAt: DateTime! + user: User! + } + + type Query { + posts: [Post!]! @skipAuth + post(id: Int!): Post @skipAuth + } +` +``` + +So we keep a single type of `Post` since the data contained within it is the same, and either SDL file will return this same data type. We can remove the mutations from the `posts` SDL since the general public will not need to access those. We move create, update and delete mutations to the new `adminPosts` SDL, and rename the two queries from `posts` to `adminPosts` and `post` to `adminPost`. In case you didn't know: every query/mutation must have a unique name across your entire application! + +In `adminPosts` we've updated the queries to use `@requireAuth` instead of `@skipAuth`. Now that we have dedicated queries for our admin pages, we can lock them down to only allow access when authenticated. + +### Create the `adminPosts` Service + +Next let's create an `adminPosts` service. We'll need to move our create/update/delete mutations to it, as the name of the SDL needs to match the name of the service: + +```javascript title="api/src/services/adminPosts/adminPosts.js" +import { db } from 'src/lib/db' + +export const adminPosts = () => { + return db.post.findMany({ where: { userId: context.currentUser.id } }) +} + +export const adminPost = ({ id }) => { + return db.post.findFirst({ + where: { id, userId: context.currentUser.id }, + }) +} + +export const createPost = ({ input }) => { + return db.post.create({ + data: { ...input, userId: context.currentUser.id }, + }) +} + +export const updatePost = ({ id, input }) => { + return db.post.update({ + data: input, + where: { id }, + }) +} + +export const deletePost = ({ id }) => { + return db.post.delete({ + where: { id }, + }) +} +``` + +(Again, don't forget the change from `findUnique()` to `findFirst()`.) 
And update `posts` to remove some of the functions that live in `adminPosts` now: + +```javascript title="api/src/services/posts/posts.js" +import { db } from 'src/lib/db' + +export const posts = () => { + return db.post.findMany() +} + +export const post = ({ id }) => { + return db.post.findUnique({ where: { id } }) +} + +export const Post = { + user: (_obj, { root }) => + db.post.findFirst({ where: { id: root.id } }).user(), +} +``` + +We've removed the `userId` lookup in the `posts` service so we're back to returning every post (for `posts`) or a single post (regardless of who owns it, in `post`). + +Note that we kept the relation resolver here `Post.user`, and there's none in `adminPosts`: since the queries and mutations from both SDLs still return a `Post`, we'll want to keep that relation resolver with the service that matches that original SDL by name: `graphql/posts.sdl.js` => `services/posts/posts.js`. + +### Update the GraphQL Queries + +Finally, we'll need to update several of the scaffold components to use the new `adminPosts` and `adminPost` queries (we'll limit the code snippets below to just the changes to save some room, this page is getting long enough!): + +```javascript title="web/src/components/Post/EditPostCell/EditPostCell.js" +export const QUERY = gql` + query EditPostById($id: Int!) { + // highlight-next-line + post: adminPost(id: $id) { + id + title + body + createdAt + } + } +` +``` + +```jsx title="web/src/components/Post/PostCell/PostCell.js" +export const QUERY = gql` + query FindPostById($id: Int!) { + // highlight-next-line + post: adminPost(id: $id) { + id + title + body + createdAt + } + } +` +``` + +```jsx title="web/src/components/Post/PostsCell/PostsCell.js" +export const QUERY = gql` + query POSTS { + // highlight-next-line + posts: adminPosts { + id + title + body + createdAt + } + } +` +``` + +If we didn't use the `posts: adminPosts` syntax, we would need to rename the argument coming into the `Success` component below to `adminPosts`. This syntax renames the result of the query to `posts` and then nothing else below needs to change! + +We don't need to make any changes to the "public" views (like `ArticleCell` and `ArticlesCell`) since those will continue to use the original `posts` and `post` queries, and their respective resolvers. + +## Update and Delete + +Okay, let's take care of `updatePost` and `deletePost` now. Why couldn't we just do this? + +```javascript +export const updatePost = ({ id, input }) => { + return db.post.update({ + data: input, + // highlight-next-line + where: { id, userId: context.currentUser.id }, + }) +} +``` + +Because like `findUnique()`, Prisma only wants to update records based on fields with unique indexes, in this case that's just `id`. So we need to keep this to just an `id`. But how do we verify that the user is only updating/deleting a record that they own? + +We could select the record first, make sure the user owns it, and only then let the `update()` commence: + +```javascript +// highlight-next-line +import { ForbiddenError } from '@redwoodjs/graphql-server' + +// highlight-start +export const updatePost = async ({ id, input }) => { + if (await adminPost({ id })) { + return db.post.update({ + data: input, + where: { id }, + }) + } else { + throw new ForbiddenError("You don't have access to this post") + } +} +``` + +We're using the `adminPost()` service function, rather than making another call to the database (note that we had to async/await it to make sure we have the post before continuing). 
Composing services like this is something Redwood was designed to encourage: services' functions act as resolvers for GraphQL, but they're also just plain JS functions and can be called wherever you need. And the reasons why you'd want to do this are clearly demonstrated here: `adminPost()` already limits the found record to be only one owned by the logged in user, so that logic is already encapsulated here, and we can be sure that any time an admin wants to do something with a single post, it runs through this code and uses the same logic every time. + +This works, but we'll need to do the same thing in `deletePost`. Let's extract that check for the post existence into a function: + +```javascript +// highlight-start +const verifyOwnership = async ({ id }) => { + if (await adminPost({ id })) { + return true + } else { + throw new ForbiddenError("You don't have access to this post") + } +} +// highlight-end + +export const updatePost = async ({ id, input }) => { + // highlight-next-line + await verifyOwnership({ id }) + + return db.post.update({ + data: input, + where: { id }, + }) +} +``` + +Simple! Our final `adminPosts` service ends up looking like: + +```javascript +import { ForbiddenError } from '@redwoodjs/graphql-server' + +import { db } from 'src/lib/db' + +const verifyOwnership = async ({ id }) => { + if (await adminPost({ id })) { + return true + } else { + throw new ForbiddenError("You don't have access to this post") + } +} + +export const adminPosts = () => { + return db.post.findMany({ where: { userId: context.currentUser.id } }) +} + +export const adminPost = ({ id }) => { + return db.post.findFirst({ + where: { id, userId: context.currentUser.id }, + }) +} + +export const createPost = ({ input }) => { + return db.post.create({ + data: { ...input, userId: context.currentUser.id }, + }) +} + +export const updatePost = async ({ id, input }) => { + await verifyOwnership({ id }) + + return db.post.update({ + data: input, + where: { id }, + }) +} + +export const deletePost = async ({ id }) => { + await verifyOwnership({ id }) + + return db.post.delete({ + where: { id }, + }) +} + +``` + +## Wrapping Up + +Whew! Let's try several different scenarios (this is the kind of thing that the QA team lives for), making sure everything is working as expected: + +* A logged out user *should* see all posts on the homepage +* A logged out user *should* be able to see the detail for a single post +* A logged out user *should not* be able to go to /admin/posts +* A logged out user *should not* see moderation controls next to comments +* A logged in admin user *should* see all articles on the homepage (not just their own) +* A logged in admin user *should* be able to go to /admin/posts +* A logged in admin user *should* be able to create a new post +* A logged in admin user *should not* be able to see anyone else's posts in /admin/posts +* A logged in admin user *should not* see moderation controls next to comments (unless you modified that behavior at the end of the last page) +* A logged in moderator user *should* see moderation controls next to comments +* A logged in moderator user *should not* be able to access /admin/posts + +In fact, you could write some new tests to make sure this functionality doesn't mistakenly change in the future. The quickest would probably be to create `adminPosts.scenarios.js` and `adminPosts.test.js` files to go with the new service and verify that you are only returned the posts owned by a given user. 
You can [mock currentUser](/docs/testing#mockcurrentuser-on-the-api-side) to simulate someone being logged in or not, with different roles. You could add tests for the Cells we modified above, but the data they get is dependent on what's returned from the service, so as long as you have the service itself covered you should be okay. The 100% coverage folks would argue otherwise, but while they're still busy writing tests we're out cruising in our new yacht thanks to all the revenue from our newly launched (with *reasonable* test coverage) features!
+
+Did it work? Great! Did something go wrong? Can someone see too much, or too little? Double check that all of your GraphQL queries are updated and you've saved changes in all the opened files.
diff --git a/docs/versioned_docs/version-7.0/tutorial/chapter7/rbac.md b/docs/versioned_docs/version-7.0/tutorial/chapter7/rbac.md
new file mode 100644
index 000000000000..490b98a4d5ac
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/tutorial/chapter7/rbac.md
@@ -0,0 +1,1318 @@
+# Role-Based Access Control (RBAC)
+
+Imagine a few weeks in the future of our blog when every post hits the front page of the New York Times and we're getting hundreds of comments a day. We can't be expected to come up with quality content each day *and* moderate the endless stream of (mostly well-meaning) comments! We're going to need help. Let's hire a comment moderator to remove obvious spam and any comments that don't heap praise on our writing ability. You know, to help make the internet a better place.
+
+We already have a login system for our blog, but right now it's all-or-nothing: you either get access to create blog posts, or you don't. In this case our comment moderator(s) will need logins so that we know who they are, but we're not going to let them create new blog posts. We need some kind of role that we can give to our two kinds of users so we can distinguish them from one another.
+
+Enter **role-based access control**, thankfully shortened to the common phrase **RBAC**. Authentication says who the person is, authorization says what they can do. "Access control" is another way to say authorization. Currently the blog has the lowest common denominator of authorization: if they are logged in, they can do everything. Let's add a "less than everything, but more than nothing" level.
+
+### Defining Roles
+
+We've got a User model so let's add a `roles` property to that:
+
+```javascript title="api/db/schema.prisma"
+model User {
+  id                  Int       @id @default(autoincrement())
+  name                String?
+  email               String    @unique
+  hashedPassword      String
+  salt                String
+  resetToken          String?
+  resetTokenExpiresAt DateTime?
+  // highlight-next-line
+  roles               String
+}
+```
+
+Next we'll (try to) migrate the database:
+
+```bash
+yarn rw prisma migrate dev
+```
+
+But that will fail with an error:
+
+```
+• Step 0 Added the required column `roles` to the `User` table without a default value. There are 1 rows in this table, it is not possible to execute this step.
+```
+
+What does this mean? We made `roles` a required field. But, we have a user in the database already (`1 rows in this table`). If we add that column to the database, it would have to be `null` for existing users since we didn't define a default. Let's create a default value so that not only can we apply this migration, but we're sure that any new users being created have some minimal level of permissions and we don't have to add even more code to check whether they have a role at all, let alone what it is.
+ +For now let's have two roles, `admin` and `moderator`. `admin` can create/edit/delete blog posts and `moderator` can only remove comments. Of those two `moderator` is the safer default since it's more restrictive: + +```javascript title="api/db/schema.prisma" +model User { + id Int @id @default(autoincrement()) + name String? + email String @unique + hashedPassword String + salt String + resetToken String? + resetTokenExpiresAt DateTime? + // highlight-next-line + roles String @default("moderator") +} +``` + +Now the migration should be able to be applied: + +```bash +yarn rw prisma migrate dev +``` + +And you can name it something like "add roles to user". + +If we log in and try to go the posts admin page at [http://localhost:8910/admin/posts](http://localhost:8910/admin/posts) everything works the same as it used to: we're not actually checking for the existence of any roles yet so that makes sense. In reality we'd only want users with the `admin` role to have access to the admin pages, but our existing user just became a `moderator` because of our default role. This is a great opportunity to actually setup a role check and see if we lose access to the admin! + +Before we do that, we'll need to make sure that the web side has access to the roles on `currentUser`. Take a look at `api/src/lib/auth.js`. Remember when we had to add `email` to the list of fields being included? We need to add `roles` as well: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/lib/auth.js" +export const getCurrentUser = async (session) => { + return await db.user.findUnique({ + where: { id: session.id }, + // highlight-next-line + select: { id: true, email: true, roles: true }, + }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript title="api/src/lib/auth.ts" +export const getCurrentUser = async (session) => { + return await db.user.findUnique({ + where: { id: session.id }, + // highlight-next-line + select: { id: true, email: true, roles: true }, + }) +} +``` + +</TabItem> +</Tabs> + +<ShowForTs> + +### Fixing the hasRole function + +At this point, you might notice an error in your `api/src/lib/auth.ts` file, in the `hasRole` function. TypeScript is trying to help you here, by highlighting that roles can never be an array of strings: + +```ts title="api/src/lib/auth.ts" +export const hasRole = (roles: AllowedRoles): boolean => { + + // ... + + } else if (Array.isArray(currentUserRoles)) { + // 👇 TypeScript will now be telling you 'some' doesn't exist on type never: + // highlight-next-line + return currentUserRoles?.some((allowedRole) => roles === allowedRole) + } + } + ``` + +This is because we now know that the type of `currentUser.roles` is a `string` based on the type being returned from Prisma. 
So you can safely remove the block of code where it's checking if roles is an array: + +```diff title="api/src/lib/auth.ts" +export const hasRole = (roles: AllowedRoles): boolean => { + if (!isAuthenticated()) { + return false + } + + const currentUserRoles = context.currentUser?.roles + + if (typeof roles === 'string') { + if (typeof currentUserRoles === 'string') { + // roles to check is a string, currentUser.roles is a string + return currentUserRoles === roles +- } else if (Array.isArray(currentUserRoles)) { +- // roles to check is a string, currentUser.roles is an array +- return currentUserRoles?.some((allowedRole) => roles === allowedRole) + } + } + + if (Array.isArray(roles)) { + if (Array.isArray(currentUserRoles)) { + // roles to check is an array, currentUser.roles is an array + return currentUserRoles?.some((allowedRole) => + roles.includes(allowedRole) + ) + } else if (typeof currentUserRoles === 'string') { + // roles to check is an array, currentUser.roles is a string + return roles.some((allowedRole) => currentUserRoles === allowedRole) + } + } + + // roles not found + return false +} +``` + +</ShowForTs> + +### Restricting Access via Routes + +The easiest way to prevent access to an entire URL is via the Router. The `<PrivateSet>` component takes a prop `roles` in which you can give a list of only those role(s) that should have access: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/Routes.jsx" +// highlight-next-line +<PrivateSet unauthenticated="home" roles="admin"> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/admin/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/admin/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/admin/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/admin/posts" page={PostPostsPage} name="posts" /> + </Set> +</PrivateSet> +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/Routes.tsx" +// highlight-next-line +<PrivateSet unauthenticated="home" roles="admin"> + <Set wrap={ScaffoldLayout} title="Posts" titleTo="posts" buttonLabel="New Post" buttonTo="newPost"> + <Route path="/admin/posts/new" page={PostNewPostPage} name="newPost" /> + <Route path="/admin/posts/{id:Int}/edit" page={PostEditPostPage} name="editPost" /> + <Route path="/admin/posts/{id:Int}" page={PostPostPage} name="post" /> + <Route path="/admin/posts" page={PostPostsPage} name="posts" /> + </Set> +</PrivateSet> +``` + +</TabItem> +</Tabs> + +Now if you browse to [http://localhost:8910/admin/posts](http://localhost:8910/admin/posts) you should get redirected to the homepage. So far so good. 
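+
+By the way, the `roles` prop isn't limited to a single role: it should also accept an array of strings, granting access when the logged-in user has *any* of them. A quick sketch (the `publisher` role here is hypothetical; we don't create it in this tutorial):
+
+```jsx
+<PrivateSet unauthenticated="home" roles={['admin', 'publisher']}>
+  {/* ...admin routes... */}
+</PrivateSet>
+```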
+ +### Changing Roles on a User + +Let's use the Redwood console again to quickly update our admin user to actually have the `admin` role: + +```bash +yarn rw c +``` + +:::tip + +You can use the `c` shortcut instead of `console` + +::: + +Now we can update our user with a single command: + +```bash +> db.user.update({ where: { id: 1 } , data: { roles: 'admin' } }) +``` + +Which should return the new content of the user: + +```bash +{ + id: 1, + name: null, + email: 'admin@admin.com', + hashedPassword: 'a12f3975a3722953fd8e326dd108d5645ad9563042fe9f154419361eeeb775d8', + salt: '9abf4665293211adce1c99de412b219e', + resetToken: null, + resetTokenExpiresAt: null, + roles: 'admin' +} +``` + +:::warning + +If you re-used the same console session from the previous section, you'll need to quit it and start it again for it to know about the new Prisma data structure. If you still can't get the update to work, maybe your user doesn't have an `id` of `1`! Run `db.user.findMany()` first and then get the `id` of the user you want to update. + +::: + +Now head back to [http://localhost:8910/admin/posts](http://localhost:8910/admin/posts) and we should have access again. As the British say: brilliant! + +### Add a Moderator + +Let's create a new user that will represent the comment moderator. Since this is in development you can just make up an email address, but if you needed to do this in a real system that verified email addresses you could use **The Plus Trick** to create a new, unique email address that is actually the same as your original email address! + +:::tip The Plus Trick + +The Plus Trick is a very handy feature of the email standard known as a "boxname", the idea being that you may have other incoming boxes besides one just named "Inbox" and by adding `+something` to your email address you can specify which box the mail should be sorted into. They don't appear to be in common use these days, but they are ridiculously helpful for us developers when we're constantly needing new email addresses for testing: it gives us an infinite number of *valid* email addresses—they all come to your regular inbox! + +Just append +something to your email address before the @: + +* `jane.doe+testing@example.com` will go to `jane.doe@example.com` +* `dom+20210909@example.com` will go to `dom@example.com` + +Note that not all providers support this plus-based syntax, but the major ones (Gmail, Yahoo, Microsoft, Apple) do. If you find that you're not receiving emails at your own domain, you may want to create a free account at one of these providers just to use for testing. + +::: + +In our case we're not sending emails anywhere, and don't require them to be verified, so you can just use a made-up email for now. `moderator@moderator.com` has a nice ring to it. + +:::info + +If you disabled the new user signup as suggested at the end of the first part of the tutorial then you'll have a slightly harder time creating a new user (the Signup page is still enabled in the example repo for convenience). You could create one with the Redwood console, but you'll need to be clever—remember that we don't store the original password, just the hashed result when combined with a salt. 
Here's the commands to enter at the console for creating a new user (replace 'password' with your password of choice): + +```javascript +const CryptoJS = require('crypto-js') +const salt = CryptoJS.lib.WordArray.random(128 / 8).toString() +const hashedPassword = CryptoJS.PBKDF2('password', salt, { keySize: 256 / 32 }).toString() +db.user.create({ data: { email: 'moderator@moderator.com', hashedPassword, salt } }) +``` + +::: + +Now if you log out as the admin and log in as the moderator you should *not* have access to the posts admin. + +### Restrict Access in a Component + +Locking down a whole page is easy enough via the Router, but what about individual functionality within a page or component? + +Redwood provides a `hasRole()` function you can get from the `useAuth()` hook (you may recall us using that to get `currentUser` and display their email address in Part 1) which returns `true` or `false` depending on whether the logged in user has the given role. Let's try it out by adding a `Delete` button when a moderator is viewing a blog post's comments: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.jsx" +// highlight-next-line +import { useAuth } from 'src/auth' + +const formattedDate = (datetime) => { + const parsedDate = new Date(datetime) + const month = parsedDate.toLocaleString('default', { month: 'long' }) + return `${parsedDate.getDate()} ${month} ${parsedDate.getFullYear()}` +} + +const Comment = ({ comment }) => { + // highlight-start + const { hasRole } = useAuth() + const moderate = () => { + if (confirm('Are you sure?')) { + // TODO: delete comment + } + } + // highlight-end + + return ( + // highlight-next-line + <div className="bg-gray-200 p-8 rounded-lg relative"> + <header className="flex justify-between"> + <h2 className="font-semibold text-gray-700">{comment.name}</h2> + <time className="text-xs text-gray-500" dateTime={comment.createdAt}> + {formattedDate(comment.createdAt)} + </time> + </header> + <p className="text-sm mt-2">{comment.body}</p> + // highlight-start + {hasRole('moderator') && ( + <button + type="button" + onClick={moderate} + className="absolute bottom-2 right-2 bg-red-500 text-xs rounded text-white px-2 py-1" + > + Delete + </button> + )} + // highlight-end + </div> + ) +} + +export default Comment +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.tsx" +// highlight-next-line +import { useAuth } from 'src/auth' + +const formattedDate = (datetime: ConstructorParameters<typeof Date>[0]) => { + const parsedDate = new Date(datetime) + const month = parsedDate.toLocaleString('default', { month: 'long' }) + return `${parsedDate.getDate()} ${month} ${parsedDate.getFullYear()}` +} + +interface Props { + comment: { + name: string + createdAt: string + body: string + } +} + +const Comment = ({ comment }: Props) => { + // highlight-start + const { hasRole } = useAuth() + const moderate = () => { + if (confirm('Are you sure?')) { + // TODO: delete comment + } + } + // highlight-end + + return ( + // highlight-next-line + <div className="bg-gray-200 p-8 rounded-lg relative"> + <header className="flex justify-between"> + <h2 className="font-semibold text-gray-700">{comment.name}</h2> + <time className="text-xs text-gray-500" dateTime={comment.createdAt}> + {formattedDate(comment.createdAt)} + </time> + </header> + <p className="text-sm mt-2">{comment.body}</p> + // highlight-start + {hasRole('moderator') && ( + <button + type="button" 
+ onClick={moderate} + className="absolute bottom-2 right-2 bg-red-500 text-xs rounded text-white px-2 py-1" + > + Delete + </button> + )} + // highlight-end + </div> + ) +} + +export default Comment +``` + +</TabItem> +</Tabs> + +![image](https://user-images.githubusercontent.com/300/101229168-c75edb00-3653-11eb-85f0-6eb61af7d4e6.png) + +So if the user has the "moderator" role, render the delete button. If you log out and back in as the admin, or if you log out completely, you'll see the delete button go away. When logged out (that is, `currentUser === null`) `hasRole()` will always return `false`. + +What should we put in place of the `// TODO` note we left ourselves? A GraphQL mutation that deletes a comment, of course. Thanks to our forward-thinking earlier we already have a `deleteComment()` service function and GraphQL mutation ready to go. + +And due to the nice encapsulation of our **Comment** component we can make all the required web-site changes in this one component: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.jsx" +// highlight-next-line +import { useMutation } from '@redwoodjs/web' + +import { useAuth } from 'src/auth' + +// highlight-next-line +import { QUERY as CommentsQuery } from 'src/components/CommentsCell' + +// highlight-start +const DELETE = gql` + mutation DeleteCommentMutation($id: Int!) { + deleteComment(id: $id) { + postId + } + } +` +// highlight-end + +const formattedDate = (datetime) => { + const parsedDate = new Date(datetime) + const month = parsedDate.toLocaleString('default', { month: 'long' }) + return `${parsedDate.getDate()} ${month} ${parsedDate.getFullYear()}` +} + +const Comment = ({ comment }) => { + const { hasRole } = useAuth() + // highlight-start + const [deleteComment] = useMutation(DELETE, { + refetchQueries: [ + { + query: CommentsQuery, + variables: { postId: comment.postId }, + }, + ], + }) + // highlight-end + + const moderate = () => { + if (confirm('Are you sure?')) { + // highlight-start + deleteComment({ + variables: { id: comment.id }, + }) + // highlight-end + } + } + + return ( + <div className="bg-gray-200 p-8 rounded-lg relative"> + <header className="flex justify-between"> + <h2 className="font-semibold text-gray-700">{comment.name}</h2> + <time className="text-xs text-gray-500" dateTime={comment.createdAt}> + {formattedDate(comment.createdAt)} + </time> + </header> + <p className="text-sm mt-2">{comment.body}</p> + {hasRole('moderator') && ( + <button + type="button" + onClick={moderate} + className="absolute bottom-2 right-2 bg-red-500 text-xs rounded text-white px-2 py-1" + > + Delete + </button> + )} + </div> + ) +} + +export default Comment +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.tsx" +// highlight-next-line +import { useMutation } from '@redwoodjs/web' + +import { useAuth } from 'src/auth' + +// highlight-next-line +import { QUERY as CommentsQuery } from 'src/components/CommentsCell' + +// highlight-next-line +import type { Comment as IComment } from 'types/graphql' + +// highlight-start +const DELETE = gql` + mutation DeleteCommentMutation($id: Int!) 
{ + deleteComment(id: $id) { + postId + } + } +` +// highlight-end + +const formattedDate = (datetime: ConstructorParameters<typeof Date>[0]) => { + const parsedDate = new Date(datetime) + const month = parsedDate.toLocaleString('default', { month: 'long' }) + return `${parsedDate.getDate()} ${month} ${parsedDate.getFullYear()}` +} + +interface Props { + // highlight-next-line + comment: Pick<IComment, 'postId' | 'id' | 'name' | 'createdAt' | 'body'> +} + +const Comment = ({ comment }: Props) => { + const { hasRole } = useAuth() + // highlight-start + const [deleteComment] = useMutation(DELETE, { + refetchQueries: [ + { + query: CommentsQuery, + variables: { postId: comment.postId }, + }, + ], + }) + // highlight-end + + const moderate = () => { + if (confirm('Are you sure?')) { + // highlight-start + deleteComment({ + variables: { id: comment.id }, + }) + // highlight-end + } + } + + return ( + <div className="bg-gray-200 p-8 rounded-lg relative"> + <header className="flex justify-between"> + <h2 className="font-semibold text-gray-700">{comment.name}</h2> + <time className="text-xs text-gray-500" dateTime={comment.createdAt}> + {formattedDate(comment.createdAt)} + </time> + </header> + <p className="text-sm mt-2">{comment.body}</p> + {hasRole('moderator') && ( + <button + type="button" + onClick={moderate} + className="absolute bottom-2 right-2 bg-red-500 text-xs rounded text-white px-2 py-1" + > + Delete + </button> + )} + </div> + ) +} + +export default Comment +``` + +</TabItem> +</Tabs> + +We'll also need to update the `CommentsQuery` we're importing from `CommentsCell` to include the `postId` field, since we are relying on it to perform the `refetchQuery` after a successful deletion: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/CommentsCell/CommentsCell.jsx" +import Comment from 'src/components/Comment' + +export const QUERY = gql` + query CommentsQuery($postId: Int!) { + comments(postId: $postId) { + id + name + body + // highlight-next-line + postId + createdAt + } + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/CommentsCell/CommentsCell.tsx" +import Comment from 'src/components/Comment' + +export const QUERY = gql` + query CommentsQuery($postId: Int!) { + comments(postId: $postId) { + id + name + body + // highlight-next-line + postId + createdAt + } + } +` +``` + +</TabItem> +</Tabs> + +Click **Delete** (as a moderator) and the comment should be removed! + +Ideally we'd have both versions of this component (with and without the "Delete" button) present in Storybook so we can iterate on the design. But there's no such thing as "logging in" in Storybook and our code depends on being logged in so we can check our roles...how will that work? + +### Mocking currentUser for Storybook + +Similar to how we can mock GraphQL calls in Storybook, we can mock user authentication and authorization functionality in a story. 
+ +In `Comment.stories.{jsx,tsx}` let's add a second story for the moderator view of the component (and rename the existing one for clarity): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.stories.jsx" +import Comment from './Comment' + +// highlight-next-line +export const defaultView = () => { + return ( + <Comment + comment={{ + id: 1, + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z', + // highlight-next-line + postId: 1 + }} + /> + ) +} + +// highlight-start +export const moderatorView = () => { + return ( + <Comment + comment={{ + id: 1, + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z', + postId: 1, + }} + /> + ) +} +// highlight-end + +export default { title: 'Components/Comment' } +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.stories.ts" +import Comment from './Comment' + +// highlight-next-line +export const defaultView = () => { + return ( + <Comment + comment={{ + id: 1, + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z', + // highlight-next-line + postId: 1, + }} + /> + ) +} + +// highlight-start +export const moderatorView = () => { + return ( + <Comment + comment={{ + id: 1, + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z', + postId: 1, + }} + /> + ) +} +// highlight-end + +export default { title: 'Components/Comment' } +``` + +</TabItem> +</Tabs> + +The **moderatorView** story needs to have a user available that has the moderator role. We can do that with the `mockCurrentUser` function: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.stories.jsx" +export const moderatorView = () => { + // highlight-start + mockCurrentUser({ + id: 1, + email: 'moderator@moderator.com', + roles: 'moderator', + }) + // highlight-end + + return ( + <Comment + comment={{ + id: 1, + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z', + postId: 1, + }} + /> + ) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.stories.tsx" +export const moderatorView = () => { + // highlight-start + mockCurrentUser({ + id: 1, + email: 'moderator@moderator.com', + roles: 'moderator', + }) + // highlight-end + + return ( + <Comment + comment={{ + id: 1, + name: 'Rob Cameron', + body: 'This is the first comment!', + createdAt: '2020-01-01T12:34:56Z', + postId: 1, + }} + /> + ) +} +``` + +</TabItem> +</Tabs> + +:::info Where did `mockCurrentUser()` come from? + +Similar to `mockGraphQLQuery()` and `mockGraphQLMutation()`, `mockCurrentUser()` is a global available in Storybook automatically, no need to import. + +::: + +`mockCurrentUser()` accepts an object and you can put whatever you want in there (it should be similar to what you return in `getCurrentUser()` in `api/src/lib/auth.{js,ts}`). But since we want `hasRole()` to work properly then the object must have a `roles` key that is a string or an array of strings. + +Check out **Comment** in Storybook and you should see two stories for Comment, one with a "Delete" button and one without! 
+ +![image](https://user-images.githubusercontent.com/300/153970232-0224a6ab-fb86-4438-ae75-2e74e32aabc1.png) + +### Mocking currentUser for Jest + +We can use the same `mockCurrentUser()` function in our Jest tests as well. Let's check that the word "Delete" is present in the component's output when the user is a moderator, and that it's not present if the user has any other role (or no role): + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```jsx title="web/src/components/Comment/Comment.test.jsx" +// highlight-next-line +import { render, screen, waitFor } from '@redwoodjs/testing' + +import Comment from './Comment' + +// highlight-start +const COMMENT = { + name: 'John Doe', + body: 'This is my comment', + createdAt: '2020-01-02T12:34:56Z', +} +// highlight-end + +describe('Comment', () => { + it('renders successfully', () => { + // highlight-next-line + render(<Comment comment={COMMENT} />) + + // highlight-start + expect(screen.getByText(COMMENT.name)).toBeInTheDocument() + expect(screen.getByText(COMMENT.body)).toBeInTheDocument() + // highlight-end + const dateExpect = screen.getByText('2 January 2020') + expect(dateExpect).toBeInTheDocument() + expect(dateExpect.nodeName).toEqual('TIME') + // highlight-next-line + expect(dateExpect).toHaveAttribute('datetime', COMMENT.createdAt) + }) + + // highlight-start + it('does not render a delete button if user is logged out', async () => { + render(<Comment comment={COMMENT} />) + + await waitFor(() => + expect(screen.queryByText('Delete')).not.toBeInTheDocument() + ) + }) + + it('renders a delete button if the user is a moderator', async () => { + mockCurrentUser({ + id: 1, + email: 'moderator@moderator.com', + roles: 'moderator', + }) + render(<Comment comment={COMMENT} />) + + await waitFor(() => expect(screen.getByText('Delete')).toBeInTheDocument()) + }) + // highlight-end +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```tsx title="web/src/components/Comment/Comment.test.tsx" +// highlight-next-line +import { render, screen, waitFor } from '@redwoodjs/testing' + +import Comment from './Comment' + +// highlight-start +const COMMENT = { + id: 1, + name: 'John Doe', + body: 'This is my comment', + createdAt: '2020-01-02T12:34:56Z', + postId: 1, +} +// highlight-end + +describe('Comment', () => { + it('renders successfully', () => { + // highlight-next-line + render(<Comment comment={COMMENT} />) + + // highlight-start + expect(screen.getByText(COMMENT.name)).toBeInTheDocument() + expect(screen.getByText(COMMENT.body)).toBeInTheDocument() + // highlight-end + const dateExpect = screen.getByText('2 January 2020') + expect(dateExpect).toBeInTheDocument() + expect(dateExpect.nodeName).toEqual('TIME') + // highlight-next-line + expect(dateExpect).toHaveAttribute('datetime', COMMENT.createdAt) + }) + + // highlight-start + it('does not render a delete button if user is logged out', async () => { + render(<Comment comment={COMMENT} />) + + await waitFor(() => + expect(screen.queryByText('Delete')).not.toBeInTheDocument() + ) + }) + + it('renders a delete button if the user is a moderator', async () => { + mockCurrentUser({ + id: 1, + email: 'moderator@moderator.com', + roles: 'moderator', + }) + + render(<Comment comment={COMMENT} />) + + await waitFor(() => expect(screen.getByText('Delete')).toBeInTheDocument()) + }) + // highlight-end +}) +``` + +</TabItem> +</Tabs> + +We moved the default `comment` object to a constant `COMMENT` and then used that in all tests. 
We also needed to add `waitFor()` since the `hasRole()` check in the Comment itself actually executes some GraphQL calls behind the scenes to figure out who the user is. The test suite makes mocked GraphQL calls, but they're still asynchronous and need to be waited for. If you don't wait, then `currentUser` will be `null` when the test starts, and Jest will be happy with that result. But we won't—we need to wait for the actual value from the GraphQL call. + +:::warning Seeing errors in your test suite? + +We added fields to the database and sometimes the test runner doesn't realize this. You may need to restart it to get the test database migrated to match what's in `schema.prisma`. Press `q` or `Ctrl-C` in your test runner if it's still running, then: + +```bash +yarn rw test +``` + +The suite should automatically run the tests for `Comment` and `CommentCell` at the very least, and maybe a few more if you haven't committed your code to git in a while. + +::: + +:::info + +This isn't the most robust test that's ever been written: what if the sample text of the comment itself had the word "Delete" in it? Whoops! But you get the idea—find some meaningful difference in each possible render state of a component and write a test that verifies its presence (or lack of presence). + +Think of each conditional in your component as another branch you need to have a test for. In the worst case, each conditional adds 2<sup>n</sup> possible render states. If you have three conditionals that's 2<sup>3</sup> (eight) possible combinations of output and to be safe you'll want to test them all. When you get yourself into this scenario it's a good sign that it's time to refactor and simplify your component. Maybe into subcomponents where each is responsible for just one of those conditional outputs? You'll still need the same number of total tests, but each component and its test is now operating in isolation and making sure it does one thing, and does it well. This has benefits for your mental model of the codebase as well. + +It's like finally organizing that junk drawer in the kitchen—you still have the same number of things when you're done, but each thing is in its own space and therefore easier to remember where it lives and makes it easier to find next time. + +::: + +You may see the following message output during the test run: + +```bash +console.error + Missing field 'postId' while writing result { + "id": 1, + "name": "Rob Cameron", + "body": "First comment", + "createdAt": "2020-01-02T12:34:56Z" + } +``` + +If you take a look at `CommentsCell.mock.{js,ts}` you'll see the mock data there used during the test. We're requesting `postId` in the `QUERY` in `CommentsCell` now, but this mock doesn't return it! 
We can fix that by simply adding that field to both mocks: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="web/src/components/CommentsCell/CommentsCell.mock.js" +export const standard = () => ({ + comments: [ + { + id: 1, + name: 'Rob Cameron', + body: 'First comment', + // highlight-next-line + postId: 1, + createdAt: '2020-01-02T12:34:56Z', + }, + { + id: 2, + name: 'David Price', + body: 'Second comment', + // highlight-next-line + postId: 2, + createdAt: '2020-02-03T23:00:00Z', + }, + ], +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```javascript title="web/src/components/CommentsCell/CommentsCell.mock.ts" +export const standard = () => ({ + comments: [ + { + id: 1, + name: 'Rob Cameron', + body: 'First comment', + // highlight-next-line + postId: 1, + createdAt: '2020-01-02T12:34:56Z', + }, + { + id: 2, + name: 'David Price', + body: 'Second comment', + // highlight-next-line + postId: 2, + createdAt: '2020-02-03T23:00:00Z', + }, + ], +}) +``` + +</TabItem> +</Tabs> + +We don't do anything with the actual post data in our tests, so there's no need to mock out the entire post, just a `postId` will suffice. + +### Roles on the API Side + +Remember: never trust the client! We need to lock down the backend to be sure that someone can't discover our `deleteComment` GraphQL resource and start deleing comments willy nilly. + +Recall in Part 1 of the tutorial we used a [directive](../../directives.md) `@requireAuth` to be sure that someone was logged in before allowing them to access a given GraphQL query or mutation. It turns out that `@requireAuth` can take an optional `roles` argument: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```graphql title="api/src/graphql/comments.sdl.js" +export const schema = gql` + type Comment { + id: Int! + name: String! + body: String! + post: Post! + postId: Int! + createdAt: DateTime! + } + + type Query { + comments(postId: Int!): [Comment!]! @skipAuth + } + + input CreateCommentInput { + name: String! + body: String! + postId: Int! + } + + input UpdateCommentInput { + name: String + body: String + postId: Int + } + + type Mutation { + createComment(input: CreateCommentInput!): Comment! @skipAuth + // highlight-next-line + deleteComment(id: Int!): Comment! @requireAuth(roles: "moderator") + } +` +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```graphql title="api/src/graphql/comments.sdl.ts" +export const schema = gql` + type Comment { + id: Int! + name: String! + body: String! + post: Post! + postId: Int! + createdAt: DateTime! + } + + type Query { + comments(postId: Int!): [Comment!]! @skipAuth + } + + input CreateCommentInput { + name: String! + body: String! + postId: Int! + } + + input UpdateCommentInput { + name: String + body: String + postId: Int + } + + type Mutation { + createComment(input: CreateCommentInput!): Comment! @skipAuth + // highlight-next-line + deleteComment(id: Int!): Comment! @requireAuth(roles: "moderator") + } +` +``` + +</TabItem> +</Tabs> + +Now a raw GraphQL query to the `deleteComment` mutation will result in an error if the user isn't logged in as a moderator. + +This check only prevents access to `deleteComment` via GraphQL. What if you're calling one service from another? 
If we wanted the same protection within the service itself, we could call `requireAuth` directly: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.js" +// highlight-next-line +import { requireAuth } from 'src/lib/auth' +import { db } from 'src/lib/db' + +// ... + +export const deleteComment = ({ id }) => { + // highlight-next-line + requireAuth({ roles: 'moderator' }) + return db.comment.delete({ + where: { id }, + }) +} +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.ts" +// highlight-next-line +import { requireAuth } from 'src/lib/auth' +import { db } from 'src/lib/db' + +// ... + +export const deleteComment = ({ id }) => { + // highlight-next-line + requireAuth({ roles: 'moderator' }) + return db.comment.delete({ + where: { id }, + }) +} +``` + +</TabItem> +</Tabs> + +We'll need a test to go along with that functionality. How do we test `requireAuth()`? The api side also has a `mockCurrentUser()` function which behaves the same as the one on the web side: + +<Tabs groupId="js-ts"> +<TabItem value="js" label="JavaScript"> + +```javascript title="api/src/services/comments/comments.test.js" +// highlight-next-line +import { AuthenticationError, ForbiddenError } from '@redwoodjs/graphql-server' + +import { db } from 'src/lib/db' + +// highlight-next-line +import { comments, createComment, deleteComment } from './comments' + +describe('comments', () => { + scenario( + 'returns all comments for a single post from the database', + async (scenario) => { + const result = await comments({ postId: scenario.comment.jane.postId }) + const post = await db.post.findUnique({ + where: { id: scenario.comment.jane.postId }, + include: { comments: true }, + }) + expect(result.length).toEqual(post.comments.length) + } + ) + + scenario('postOnly', 'creates a new comment', async (scenario) => { + const comment = await createComment({ + input: { + name: 'Billy Bob', + body: 'What is your favorite tree bark?', + postId: scenario.post.bark.id, + }, + }) + + expect(comment.name).toEqual('Billy Bob') + expect(comment.body).toEqual('What is your favorite tree bark?') + expect(comment.postId).toEqual(scenario.post.bark.id) + expect(comment.createdAt).not.toEqual(null) + }) + + // highlight-start + scenario('allows a moderator to delete a comment', async (scenario) => { + mockCurrentUser({ roles: ['moderator'] }) + + const comment = await deleteComment({ + id: scenario.comment.jane.id, + }) + expect(comment.id).toEqual(scenario.comment.jane.id) + + const result = await comments({ postId: scenario.comment.jane.postId }) + expect(result.length).toEqual(0) + }) + + scenario( + 'does not allow a non-moderator to delete a comment', + async (scenario) => { + mockCurrentUser({ roles: 'user' }) + + expect(() => + deleteComment({ + id: scenario.comment.jane.id, + }) + ).toThrow(ForbiddenError) + } + ) + + scenario( + 'does not allow a logged out user to delete a comment', + async (scenario) => { + mockCurrentUser(null) + + expect(() => + deleteComment({ + id: scenario.comment.jane.id, + }) + ).toThrow(AuthenticationError) + } + ) + // highlight-end +}) +``` + +</TabItem> +<TabItem value="ts" label="TypeScript"> + +```ts title="api/src/services/comments/comments.test.ts" +// highlight-next-line +import { AuthenticationError, ForbiddenError } from '@redwoodjs/graphql-server' + +import { db } from 'src/lib/db' + +// highlight-next-line +import { comments, createComment, deleteComment } from 
'./comments' + +import type { PostOnlyScenario, StandardScenario } from './comments.scenarios' + +describe('comments', () => { + scenario( + 'returns all comments for a single post from the database', + async (scenario) => { + const result = await comments({ postId: scenario.comment.jane.postId }) + const post = await db.post.findUnique({ + where: { id: scenario.comment.jane.postId }, + include: { comments: true }, + }) + expect(result.length).toEqual(post.comments.length) + } + ) + + scenario( + 'postOnly', + 'creates a new comment', + async (scenario: PostOnlyScenario) => { + const comment = await createComment({ + input: { + name: 'Billy Bob', + body: 'What is your favorite tree bark?', + postId: scenario.post.bark.id, + }, + }) + + expect(comment.name).toEqual('Billy Bob') + expect(comment.body).toEqual('What is your favorite tree bark?') + expect(comment.postId).toEqual(scenario.post.bark.id) + expect(comment.createdAt).not.toEqual(null) + } + ) + + // highlight-start + scenario( + 'allows a moderator to delete a comment', + async (scenario: StandardScenario) => { + mockCurrentUser({ + roles: 'moderator', + id: 1, + email: 'moderator@moderator.com', + }) + + const comment = await deleteComment({ + id: scenario.comment.jane.id, + }) + expect(comment.id).toEqual(scenario.comment.jane.id) + + const result = await comments({ postId: scenario.comment.jane.postId }) + expect(result.length).toEqual(0) + } + ) + + scenario( + 'does not allow a non-moderator to delete a comment', + async (scenario: StandardScenario) => { + mockCurrentUser({ roles: 'user', id: 1, email: 'user@user.com' }) + + expect(() => + deleteComment({ + id: scenario.comment.jane.id, + }) + ).toThrow(ForbiddenError) + } + ) + + scenario( + 'does not allow a logged out user to delete a comment', + async (scenario: StandardScenario) => { + mockCurrentUser(null) + + expect(() => + deleteComment({ + id: scenario.comment.jane.id, + }) + ).toThrow(AuthenticationError) + } + ) + // highlight-end +}) +``` + +</TabItem> +</Tabs> + +Our first scenario checks that we get the deleted comment back from a call to `deleteComment()`. The second expectation makes sure that the comment was actually removed from the database: trying to find a comment with that `id` now returns an empty array. If this was the only test we had it could lull us into a false sense of security—what if the user had a different role, or wasn't logged in at all? + +We aren't testing those cases here, so we add two more tests: one for if the user has a role other than "moderator" and one if the user isn't logged in at all. These two cases also raise different errors, so it's nice to see that codified here. + +### Last Word on Roles + +Having a role like "admin" implies that they can do everything...shouldn't they be able to delete comments as well? Right you are! There are two things we can do here: + +1. Add "admin" to the list of roles in the `hasRole()` checks in components, `@requireAuth` directive, and `requireAuth()` check in services +2. Don't make any changes in the code, just give the user in the database additional roles—so admins will also have the "moderator" role in addition to "admin" + +By virtue of the name "admin" it really feels like someone should only have that one single role and be able to do everything. So in this case it might feel better to add "admin" to `hasRole()` and `requireAuth()`. + +But, if you wanted to be more fine-grained with your roles then maybe the "admin" role should really be called "author". 
That way it makes it clear they only author posts, and if you want someone to be able to do both actions you can explicitly give them the "moderator" role in addition to "author." + +Managing roles can be a tricky thing to get right. Spend a little time up front thinking about how they'll interact and how much duplication you're willing to accept in your role-based function calls on the site. If you see yourself constantly adding multiple roles to `hasRole()` and `requireAuth()` that may be an indication that it's time to add a single, new role that includes those abilities and remove that duplication in your code. diff --git a/docs/versioned_docs/version-7.0/tutorial/foreword.md b/docs/versioned_docs/version-7.0/tutorial/foreword.md new file mode 100644 index 000000000000..7fd872c1adfb --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/foreword.md @@ -0,0 +1,86 @@ +# RedwoodJS: The Tutorial + +Welcome to Redwood! If you haven't yet, check out the [Redwood README](https://github.com/redwoodjs/redwood/blob/main/README.md) to get a little background on why we created Redwood and the problems it's meant to solve. Redwood brings several existing technologies together for the first time into what we think is the future of database-backed single page applications. + +:::info Sign up for tutorial reminders + +There's a new JavaScript framework coming out every week, we know it can be hard to keep up. If you'd like some non-spammy emails reminding you to go through the tutorial, give us your email below: + +<MailchimpForm /> + +::: + +In this tutorial we're going to build a blog engine. In reality a blog is probably not the ideal candidate for a Redwood app: blog articles can be stored in a CMS and statically generated to HTML files and served as flat files from a CDN (the classic [Jamstack](https://jamstack.org/) use case). But as most developers are familiar with a blog, and it uses all of the features we want to demonstrate, we decided to build one anyway. + +If you went through an earlier version of this tutorial you may remember it being split into parts 1 and 2. That was an artifact of the fact that most features demonstrated in part 2 didn't exist in the framework when part 1 was written. Once they were added we created part 2 to contain just those new features. Now that everything is integrated and working well we've moved each section into logically grouped chapters. + +## Callouts + +You'll find some callouts throughout the text to draw your attention: + +:::tip + +They might look like this... + +::: + +:::warning + +or sometimes like this... + +::: + +:::danger + +or maybe even like this! + +::: + +It's usually something that goes into more detail about a specific point, refers you to further reading, or calls out something important you should know. Here comes one now: + +:::info + +This tutorial assumes you are using version 7.0.0 or greater of RedwoodJS. + +::: + +Let's get started! 
+ +export const MailchimpForm = () => ( + <> + <div id="mc_embed_signup"> + <form + action="https://thedavidprice.us19.list-manage.com/subscribe/post?u=0c27354a06a7fdf4d83ce07fc&id=a94da1950a" + method="post" + name="mc-embedded-subscribe-form" + target="_blank" + > + <div style={{ position: 'absolute', left: '-5000px' }} aria-hidden="true"> + <input + type="text" + name="b_0c27354a06a7fdf4d83ce07fc_a94da1950a" + tabIndex="-1" + defaultValue="" + /> + </div> + <div style={{ display: 'flex', alignItems: 'center', justify: 'center' }}> + <input + type="email" + defaultValue="" + placeholder="Email Address" + required={true} + name="EMAIL" + style={{ width: '100%', padding: '0.75rem', border: '1px solid #cccccc', borderRadius: '0.25rem', fontSize: '100%' }} + /> + <input + type="submit" + value="Subscribe" + name="subscribe" + style={{ cursor: 'pointer', marginLeft: '0.5rem', padding: '0.8rem 2rem', fontSize: '100%', fontWeight: 'bold', color: '#ffffff', backgroundColor: '#4cb3d4', border: 'none', borderRadius: '0.25rem' }} + /> + </div> + </form> + </div> + </> +) + diff --git a/docs/versioned_docs/version-7.0/tutorial/intermission.md b/docs/versioned_docs/version-7.0/tutorial/intermission.md new file mode 100644 index 000000000000..793730157af2 --- /dev/null +++ b/docs/versioned_docs/version-7.0/tutorial/intermission.md @@ -0,0 +1,63 @@ +# Intermission + +Let's take a break! If you really went through the whole tutorial so far: congratulations! If you just skipped ahead to this page to try and get a free congratulations: tsk, tsk! + +That was potentially a lot of new concepts to absorb all at once so don't feel bad if all of it didn't fully sink in. React, GraphQL, Prisma, serverless functions...so many things! Even those of us working on the framework are heading over to Google multiple times per day to figure out how to get these things to work together. + +As an anonymous Twitter user once mused: "If you enjoy switching between feeling like the smartest person on earth and the dumbest person in history all in the same day, programming may be the career for you!" + +## What's Next? + +Starting in Chapter 5 We'll look at Storybook and Jest and build a new feature for the blog: comments. Storybook introduces a new way to build components. We'll also add tests and run them with Jest to make sure things keep working as we expect. We cover authorization as well by giving a special role to comment moderators. + +If you've been through the tutorial so far, you can pick up where you left off and continue from here with Chapter 5. However, going forward we assume a complete test suite and several Storybook components, which we didn't get a chance to build in the first half. To get to the same starting point as the beginning of Chapter 5 you can start from this [example repo](https://github.com/redwoodjs/redwood-tutorial) (which we highly recommend) that picks up at the end of chapter 4, but already has additional styling, a starting test suite, and several Storybook components already built for you. + +### Using Your Current Codebase + +If you want to use the same CSS classes we use in the following examples you'll need to add Tailwind to your repo: + +```bash +yarn rw setup ui tailwindcss +``` + +However, none of the screenshots that follow will come anywhere close to what you're seeing in your browser (except for those isolated components you build in Storybook) so you may want to just start with the [example repo](https://github.com/redwoodjs/redwood-tutorial). 
You'll also be missing out on a good starting test suite that we've added! + +If you're *still* set on continuing with your own repo, and you deployed to a service like Netlify, you would have changed the database provider in `schema.prisma` to `postgresql`. If that's the case then make sure your local development environment has changed over as well. Check out the [Local Postgres Setup](../local-postgres-setup.md) for assistance. If you stick with the [example repo](https://github.com/redwoodjs/redwood-tutorial) instead, you can go ahead with good ol' SQLite (what we were using locally to build everything in the first half). + +Once you're ready, start up the dev server: + +```bash +yarn rw dev +``` + +### Using the Example Repo (Recommended) + +If you haven't been through the first tutorial, or maybe you went through it on an older version of Redwood (anything pre-0.41) you can clone [this repo](https://github.com/redwoodjs/redwood-tutorial) which contains everything built so far and also adds a little styling so it isn't quite so...tough to look at. The example repo includes [TailwindCSS](https://tailwindcss.com) to style things up and adds a `<div>` or two to give us some additional hooks to hang styling on. + +:::warning The TypeScript version of the Example Repo is currently in progress + +If you want to complete the tutorial in TypeScript, continue with your own repo, making any necessary edits. Don't worry, the remainder of the tutorial continues to offer both TypeScript and JavaScript example code changes. + +::: + +```bash +git clone https://github.com/redwoodjs/redwood-tutorial +cd redwood-tutorial +yarn install +yarn rw prisma migrate dev +yarn rw g secret +``` + +That'll check out the repo, install all the dependencies, create your local database (SQLite) and fill it with a few blog posts. After that last command (`yarn rw g secret`) you'll need to copy the string that's output and add it to a file `.env` in the root of your project: + +```bash title=".env" +SESSION_SECRET=JV2kA48ZU4FnLHwqaydy9beJ99qy4VgWXPkvsaw3xE2LGyuSur2dVq2PsPkPfygr +``` + +This is the encryption key for the secure cookies used in [dbAuth](/docs/tutorial/chapter4/authentication#session-secret). + +Now just run `yarn rw dev` to start your development server. Your browser should open to a fresh new blog app: + +![image](https://user-images.githubusercontent.com/300/101423176-54e93780-38ad-11eb-9230-ba8557764eb4.png) + +Take a bathroom break and grab a fresh beverage, then let's get on with it! diff --git a/docs/versioned_docs/version-7.0/typescript/generated-types.md b/docs/versioned_docs/version-7.0/typescript/generated-types.md new file mode 100644 index 000000000000..644cc7b39bd4 --- /dev/null +++ b/docs/versioned_docs/version-7.0/typescript/generated-types.md @@ -0,0 +1,203 @@ +--- +description: A look at automatic type generation in Redwood +--- + +# Generated Types + +To add to the TypeScript (and JavaScript!) experience, Redwood generates types for you. +These generated types not only include your GraphQL operations, but also your named routes, Cells, scenarios, and tests. + +When you run `yarn rw dev`, the CLI watches files for changes and triggers type generation automatically, but you can trigger it manually too: + +```shell +yarn rw g types +# or +# yarn redwood generate types +``` + +:::tip Getting errors trying to generate types? + +If you're getting errors trying to generate types, it's worth checking the GraphQL operations in your Cells and SDLs. 
+Make sure that they're syntactically valid, and that every query and mutation on the web side is defined in an `*.sdl.js` file on the api side. + +::: + +If you're curious, you can find the generated types in the `.redwood/types`, `web/types/graphql.d.ts`, and `api/types/graphql.d.ts` directories. Broadly speaking, Redwood generates the following types: + +1. ["mirror" types](https://www.typescriptlang.org/docs/handbook/module-resolution.html#virtual-directories-with-rootdirs) for your components, pages, layouts, etc. on the web side, and for your services, lib, etc. on the api side +2. types based on your queries and mutations on the web side (in `web/types/graphql.d.ts`) +3. types for resolvers based on your SDLs on the api side (in `api/types/graphql.d.ts`) +4. types for testing, `currentUser`, etc. +5. types for certain functions like `routes.pageName()` and `useAuth()` + +## CurrentUser + +If you've setup auth, the type for the current user on both the web and the api side gets automatically "inferred" from the `getCurrentUser` function in `api/src/lib/auth.ts`. + +For example, if you specify the return type on `getCurrentUser` as... + +```ts title="api/src/lib/auth.ts" +interface MyCurrentUser { + id: string, + roles: string[], + email: string, + projectId: number +} + +const getCurrentUser = ({decoded}): MyCurrentUser => { + //.. +} +``` + +The types for both `useAuth().currentUser` on the web side and `context.currentUser` on the api side will be the same—the `MyCurrentUser` interface. + +:::info Type of `context.currentUser` unknown? +This usually happens when you don't have the various generated and utility types in your project. +Run `yarn rw g types`, and just to be sure, restart your TS server. +In VSCode, you can do this by running "TypeScript: Restart TS server" in the command palette (Cmd+Shift+P on Mac, Ctrl+Shift+P on Windows) +::: + +## Query and Mutation types + +Let's say you have a query in a Cell that looks like this: + +```js title="web/src/components/BlogPostCell.tsx" +export const QUERY = gql` + # 👇 Make sure to name your GraphQL operations + query FindBlogPostQuery($id: Int!) { + blogPost: post(id: $id) { + title + body + } + } +` +``` + +Redwood generates types for both the data returned from the query and the query's variables. +These generated types will use the query's name—in this case, `FindBlogPostQuery`—so you can import them like this: + +```ts title="web/src/components/BlogPostCell.tsx" +import type { FindBlogPostQuery, FindBlogPostQueryVariables } from 'types/graphql' +``` + +`FindBlogPostQuery` is the type of the data returned from the query (`{ title: string, body: string }`) and `FindBlogPostQueryVariables` is the type of the query's variables (`{ id: number }`). + +The import statement's specifier, `'types/graphql'`, is a [mapped path](https://www.typescriptlang.org/docs/handbook/module-resolution.html#path-mapping). First, TypeScript will look for the types in `web/types/graphql.d.ts`; if they're not there, it'll check `types/graphql.d.ts`. Redwood only automatically generates the former. For the latter, see [sharing types between sides](./introduction.md#sharing-types-between-sides). + +But don't worry too much. If you use the generators, they template all of this for you! 
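+
+To make that concrete, here's a minimal sketch (building on the `FindBlogPostQuery` example above) of typing a Cell's `Success` component with the generated operation types and the `CellSuccessProps` helper from `@redwoodjs/web`:
+
+```tsx title="web/src/components/BlogPostCell.tsx"
+import type { CellSuccessProps } from '@redwoodjs/web'
+
+import type {
+  FindBlogPostQuery,
+  FindBlogPostQueryVariables,
+} from 'types/graphql'
+
+export const Success = ({
+  blogPost,
+}: CellSuccessProps<FindBlogPostQuery, FindBlogPostQueryVariables>) => {
+  // `blogPost` is typed from the query above, so `title` and `body` autocomplete
+  return <h2>{blogPost.title}</h2>
+}
+```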
+ +## Resolver Types + +Generated Services include types for query and mutation resolvers: + +```ts title="api/src/services/posts.ts" +// highlight-next-line +import type { QueryResolvers, MutationResolvers } from 'types/graphql' + +import { db } from 'src/lib/db' + +// highlight-next-line +export const posts: QueryResolvers['posts'] = () => { + return db.post.findMany() +} + +// highlight-next-line +export const post: QueryResolvers['post'] = ({ id }) => { + return db.post.findUnique({ + where: { id }, + }) +} +``` + +These types help you by making sure you're returning an object in the shape of what you've defined in your SDL. If your Prisma model name matches the SDL type name, it'll be "mapped" i.e. the resolvers will expect you to return the Prisma type. + +Note that these types expect you to return the _complete_ type that you've defined in your Prisma schema. But you can just return the result of the Prisma query, and not have to worry about how, for example, a DateTime in Prisma maps to a String in GraphQL. + +If the type doesn't match your Prisma models (by name), the TypeScript type will be generated based only on your definition in the SDL. So if you wish to return other properties that don't exist in your Prisma model type i.e. augment the prisma type with additional fields, you can change the type to a custom one in your SDL. + +The resolver types help you by making sure you're returning an object in the shape of what you've defined in your SDL. + +:::note A note on union types + +Lets say that in one of your SDLs, you define a union type + +```graphql +type OutOfStock { + message: String! +} + +// highlight-next-line +union CandyResult = Candy | OutOfStock + +type Query { + candy(id: String!): CandyResult @skipAuth +} +``` + +These types will also be handled automatically. But if you're returning a different Prisma model (instead of something like the generic `OutOfStock` type we have here, which is just a message), you may need to write your own resolver type, as the type generator won't know how to map the Prisma type to the GraphQL return type. + +::: + +## Under the Hood + +Redwood uses [GraphQL Code Generator](https://www.graphql-code-generator.com) (aka graphql-codegen) to generate types for your GraphQL operations and SDLs. It's even configured to use the types from your generated Prisma Client, to make sure that your resolvers are strongly typed! + +### Customizing GraphQL Code Generation + +While the default settings are configured so that things just work️, you can customize them to your liking by adding a `./codegen.yml` file to the root of your project. + +:::info Curious about the defaults? + +You can find them [here](https://github.com/redwoodjs/redwood/blob/main/packages/internal/src/generate/graphqlCodeGen.ts) in Redwood's source. Look for the `generateTypeDefGraphQLWeb` and `generateTypeDefGraphQLApi` functions. + +::: + +For example, adding this `codegen.yml` to the root of your project will transform the names of the generated types to UPPERCASE: + +```yml title="codegen.yml" +config: + namingConvention: + typeNames: change-case-all#upperCase +``` + +You can configure graphql-codegen in a number of different ways: `codegen.yml`, `codegen.json`, or `codegen.js`. Even a `codegen` key in your root `package.json` will do. graphql-codegen uses [cosmiconfig](https://github.com/davidtheclark/cosmiconfig#cosmiconfig) under the hood—take a look at their docs if you want to know more. 
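+
+For example, the same naming tweak could live in a `codegen.js` instead of `codegen.yml`. A sketch (assuming a CommonJS project root, mirroring the YAML example above):
+
+```js title="codegen.js"
+// Equivalent of the codegen.yml example above, expressed as a JS config file
+module.exports = {
+  config: {
+    namingConvention: {
+      typeNames: 'change-case-all#upperCase',
+    },
+  },
+}
+```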
+
+For completeness, [here's the docs](https://www.graphql-code-generator.com/docs/config-reference/config-field) on configuring GraphQL Code Generator. Currently, Redwood only supports the root-level `config` option.
+
+## Experimental SDL Code Generation
+
+There is also an experimental code generator based on [sdl-codegen](https://github.com/sdl-codegen/sdl-codegen) available. sdl-codegen is a fresh implementation of code generation for service files, built with Redwood in mind. It's currently opt-in and can be enabled by setting the `useSDLCodeGenForGraphQLTypes` flag to `true` under the `[experimental]` section of your `redwood.toml` file:
+
+```toml title="redwood.toml"
+[experimental]
+  useSDLCodeGenForGraphQLTypes = true
+```
+
+Running `yarn rw g types` will then generate types for your resolvers on a per-file basis. This feature can be paired with the optional ESLint auto-fix rule to have types automatically applied to your resolvers in TypeScript service files by editing your root `package.json`:
+
+```diff title="package.json"
+  "eslintConfig": {
+    "extends": "@redwoodjs/eslint-config",
+    "root": true,
+    "parserOptions": {
+      "warnOnUnsupportedTypeScriptVersion": false
+    },
++   "overrides": [
++     {
++       "files": [
++         "api/src/services/**/*.ts"
++       ],
++       "rules": {
++         "@redwoodjs/service-type-annotations": "error"
++       }
++     }
+   ]
+  },
+```
+
+:::tip Using VSCode?
+
+As a part of type generation, the extension [GraphQL: Language Feature Support](https://marketplace.visualstudio.com/items?itemName=GraphQL.vscode-graphql) configures itself based on the merged schema Redwood generates in `.redwood/schema.graphql`.
+You can configure it further in `graphql.config.js` at the root of your project.
+
+:::
diff --git a/docs/versioned_docs/version-7.0/typescript/introduction.md b/docs/versioned_docs/version-7.0/typescript/introduction.md
new file mode 100644
index 000000000000..eb881419fac1
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/typescript/introduction.md
@@ -0,0 +1,134 @@
+---
+title: TypeScript in Redwood
+description: Getting started & Core Concepts
+---
+
+Redwood comes with full TypeScript support, and you don't have to give up any of the conveniences that Redwood offers to enjoy all the benefits of a type-safe codebase.
+
+## Getting Started
+
+### Starting a Redwood Project in TypeScript
+
+You can use the `--typescript` option on `yarn create redwood-app` to use TypeScript from the start:
+
+```shell
+yarn create redwood-app my-redwood-app --typescript
+```
+
+### Converting a JavaScript Project to TypeScript
+
+Started your project in JavaScript but want to switch to TypeScript?
+Start by using the `tsconfig` setup command:
+
+```shell
+yarn rw setup tsconfig
+```
+
+This adds `tsconfig.json` files to both the web and the api side, telling VSCode that this is a TypeScript project.
+(You can go ahead and remove the `jsconfig.json` files from both sides now.)
+
+You don't need to convert all your JavaScript files to TypeScript right away.
+In fact, you probably shouldn't.
+Do it incrementally.
+Start by renaming your files from `.js` to `.ts`. (Or, if they contain a React component, `.tsx`.)
+
+## Core Concepts
+
+### 1. Automatic types
+
+When you're developing in TypeScript, the Redwood CLI is your trusted companion—focus on writing code and it'll generate as many of the types as it can.
+When you run `yarn rw dev`, the CLI watches files for changes so that it can generate types.
+(More on this in the [Generated Types](/typescript/generated-types.md) doc.)
+
+But let's say that you don't have the dev server running, and you're just coding along and notice some missing types.
+You can always run `yarn rw g types` to make sure you have all the types you need.
+
+### 2. Use generators to learn about available utility types
+
+Let's say you generate a Cell using the command `yarn rw g cell Post`. If your project is in TypeScript, the generated files will contain a bunch of utility types (imported from `@redwoodjs/web`), as well as types specific to your project (imported from `types/graphql`).
+You don't need to learn all the utility types up front, but they're documented in detail in the [Utility Types](/typescript/utility-types.md) doc when you're ready.
+
+### 3. Redwood won't force you to type everything
+
+The Redwood philosophy is to keep things as simple as possible at first. Redwood generates as much as possible, avoids forcing you to type every little detail, and doesn't have [strict mode](https://www.typescriptlang.org/tsconfig#strict) on by default.
+Where needed (e.g., the [`DbAuthHandler`](/typescript/utility-types.md#dbauthhandleroptions)), you can make use of generics to be more specific with your types.
+
+But if you're comfortable with TypeScript and want that extra level of safety, take a look at our [Strict Mode](/typescript/strict-mode.md) doc.
+
+## A Few Useful Tips
+
+### Sharing Types between Sides
+
+To share types between sides:
+
+1. Put them in a directory called `types` at the root of your project (you may have to create this directory)
+2. Restart your editor's TypeScript server. In VSCode, you can do this by running the "TypeScript: Restart TS server" command via the command palette (make sure you're in a `.js` or `.ts` file)
+
+### Running Type Checks
+
+Behind the scenes, Redwood actually uses Babel to transpile TypeScript.
+This is why you're able to convert your project from JavaScript to TypeScript incrementally, but it also means that, strictly speaking, the dev and build commands don't care what the TypeScript compiler has to say.
+
+That's where the `type-check` command comes in:
+
+```
+yarn rw type-check
+```
+
+This runs `tsc` on your project and ensures that all the necessary generated types are generated first. Check out the [CLI reference for `type-check`](cli-commands.md#type-check).
+
+### Using Alias Paths
+
+Alias paths are a mechanism that allows you to define custom shortcuts or aliases for import statements in your code. Instead of using relative or absolute paths to import modules or files, you can use these aliases to make your imports cleaner and more concise.
+
+Redwood comes with a great starting point by aliasing the `src` directory, but you can take this further by configuring your `tsconfig.json` file. Your import paths could go from:
+
+```ts
+// this really long path
+import { CustomModal } from 'src/components/modules/admin/common/ui/CustomModal'
+
+// to this nicer one!
+import { CustomModal } from '@adminUI/CustomModal'
+```
+
+Add your custom `@adminUI` alias to your `tsconfig.json` file:
+
+```json
+{
+  "compilerOptions": {
+...
+    "paths": {
+      "src/*": ["./src/*", "../.redwood/types/mirror/api/src/*"],
+
+      "@adminUI/*": [
+        "./src/components/modules/admin/common/ui/*",
+        "../.redwood/types/mirror/web/src/components/modules/admin/common/ui/*"
+      ],
+
+      "types/*": ["./types/*", "../types/*"],
+      "@redwoodjs/testing": ["../node_modules/@redwoodjs/testing/api"]
+    }
+  }
+...
+}
+```
+
+You might have noticed the `"../.redwood/types/mirror/web/src/components/modules/admin/common/ui/*"` path. I'm glad you did!
+
+When you build your project, Redwood will create a set of directories under `.redwood` that act as a virtual directory ([read more about this TypeScript feature here](https://www.typescriptlang.org/docs/handbook/module-resolution.html#virtual-directories-with-rootdirs)). This directory contains types for the Cells, so there's no need for us to specify an index file.
+
+When you combine those two paths, `./src/...` and `./.redwood/...`, under an alias, you get shorter and cleaner import paths:
+
+```ts
+// Instead of this 🥵
+import { CustomModal } from 'src/components/modules/admin/common/ui/CustomModal/CustomModal'
+
+// they could look like this ✨
+import { CustomModal } from '@adminUI/CustomModal'
+```
+
+#### Some benefits of using alias paths
+
+1. **Improved code readability**, by abstracting away complex directory hierarchies and giving your imports meaningful names.
+1. **Code maintainability**: aliases decouple your code from the file structure, so you can move files around more easily because imports aren't tied to the longer path.
+1. **Less boilerplate**: no more `../../src/components/modules/admin/common/ui/` 😮‍💨
diff --git a/docs/versioned_docs/version-7.0/typescript/strict-mode.md b/docs/versioned_docs/version-7.0/typescript/strict-mode.md
new file mode 100644
index 000000000000..221d7e6fe98b
--- /dev/null
+++ b/docs/versioned_docs/version-7.0/typescript/strict-mode.md
@@ -0,0 +1,267 @@
+---
+description: TS Strict mode tips and tricks
+---
+
+# TypeScript Strict Mode
+
+Looks like you're ready to level up your TypeScript game!
+Redwood supports [strict mode](https://www.typescriptlang.org/docs/handbook/2/basic-types.html#strictness), but doesn't enable it by default.
+While strict mode gives you a lot more safety, it makes your code a bit more verbose and requires you to make small manual changes if you use the generators.
+
+## Enabling strict mode
+
+Enable strict mode by setting `strict` to true in `web/tsconfig.json` and `api/tsconfig.json`, and, if you're using scripts, in `scripts/tsconfig.json`:
+
+```json title="web/tsconfig.json, api/tsconfig.json, scripts/tsconfig.json"
+{
+  "compilerOptions": {
+    "noEmit": true,
+    "allowJs": true,
+    // highlight-next-line
+    "strict": true,
+    // ...
+  }
+  // ...
+}
+```
+
+Redwood's type generator behaves a bit differently in strict mode, so now that you've opted in, make sure to generate types:
+
+```
+yarn rw g types
+```
+
+## Manual tweaks to generated code
+
+Now that you're in strict mode, there are some changes you need to make to get rid of those pesky red underlines!
+
+### `null` and `undefined` in Services
+
+One of the challenges in the GraphQL-Prisma world is the difference in the way they treat optionals:
+
+- for GraphQL, optional fields can be `null`
+- but for Prisma, `null` is a value and `undefined` means "do nothing"
+
+This is covered in detail in [Prisma's docs](https://www.prisma.io/docs/concepts/components/prisma-client/null-and-undefined), which we strongly recommend reading.
+But the gist of it is that, for Prisma's create and update operations, you may have to convert the `null`s in your GraphQL mutation inputs to `undefined`. You'll have to think carefully about the behavior you want: if the client is expected to send `null`, and you want those fields set to `null`, you can make the field nullable in your Prisma schema. Sending `null` means removing that value; sending `undefined` means the field won't be updated.
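+
+As a quick illustration of that difference, here's a sketch (assuming a `User` model with a nullable `nickname` column):
+
+```ts
+import { db } from 'src/lib/db'
+
+export const clearNickname = (id: number) => {
+  return db.user.update({
+    data: {
+      nickname: null, // null is a value: the column is explicitly set to NULL
+      email: undefined, // undefined means "do nothing": the column is left untouched
+    },
+    where: { id },
+  })
+}
+```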
+ +For most cases however, you probably want to convert nulls to undefined - one way to do this is to use the `removeNulls` utility function from `@redwoodjs/api`: + +```ts title="api/src/services/users.ts" +// highlight-next-line +import { removeNulls } from "@redwoodjs/api" + +export const updateUser: MutationResolvers["updateUser"] = ({ id, input }) => { + return db.user.update({ + // highlight-next-line + data: removeNulls(input), + where: { id }, + }) +} +``` + +### Relation resolvers in services + +Let's say you have a `Post` model in your `schema.prisma` that has an `author` field which is a relation to the `Author` model. It's a required field. +This is what the `Post` model's SDL would probably look like: + +```graphql post.sdl.ts +export const schema = gql` + type Post { + id: Int! + title: String! + // highlight-next-line + author: Author! # 👈 This is a relation; the `!` makes it a required field + authorId: Int! + # ... + } +``` + +When you generate SDLs or Services, the resolver for `author` is generated at the bottom of `post.service.ts` on the `Post` object. +Because `Post.author` can't be null (we said it's required in the Prisma schema)—and because `findUnique` always returns a nullable value—in strict mode, you'll have to tweak this resolver: + +```ts Post.service.ts +// Option 1: Override the type +// The typecasting here is OK. `root` is the post that was _already found_ +// by the `post` function in your Services, so `findUnique` will always find it! +export const Post: PostRelationResolvers = { + author: (_obj, { root }) => + db.post.findUnique({ where: { id: root?.id } }).author() as Promise<Author>, // 👈 +} + +// Option 2: Check for null +export const Post: PostRelationResolvers = { + author: async (_obj, { root }) => { + // Here, `findUnique` can return `null`, so we have to handle it: + const maybeAuthor = await db.post + .findUnique({ where: { id: root?.id } }) + .author() + + // highlight-start + if (!maybeAuthor) { + throw new Error('Could not resolve author') + } + // highlight-end + + return maybeAuthor + }, +} +``` + + +:::tip An optimization tip + +If the relation truly is required, it may make more sense to include `author` in your `post` Service's Prisma query and modify the `Post.author` resolver accordingly: + +```ts +export const post: QueryResolvers['post'] = ({ id }) => { + return db.post.findUnique({ + // highlight-start + include: { + author: true, + }, + // highlight-end + where: { id }, + }) +} + +export const Post: PostRelationResolvers = { + author: async (_obj, { root }) => { + // highlight-start + if (root.author) { + return root.author + } + // highlight-end + + const maybeAuthor = await db.post.findUnique(// ... +``` + +This will also help Prisma make a more optimized query to the database, since every time a field on `Post` is requested, the post's author is too! The tradeoff here is that any query to `Post` (even if the author isn't requested) will mean an unnecessary database query to include the author. + +::: + +### Roles checks for CurrentUser in `src/lib/auth` + +When you setup auth, Redwood includes some template code for handling roles with the `hasRole` function. 
+While Redwood does runtime checks to make sure it doesn't access roles if it doesn't exist, TypeScript in strict mode will highlight errors, depending on whether you are returning `roles`, and whether those roles are `string` or `string[]` + +```typescript +export const hasRole = (roles: AllowedRoles): boolean => { + if (!isAuthenticated()) { + return false + } + + // highlight-next-line + const currentUserRoles = context.currentUser?.roles + // Error: Property 'roles' does not exist on type '{ id: number; }'.ts(2339) +``` + +You'll have to adjust the generated code depending on your User model. + +<details> +<summary>Example code diffs</summary> + +#### A. If your project does not use roles + +If your `getCurrentUser` doesn't return `roles`, and you don't use this functionality, you can safely remove the `hasRole` function. + +#### B. Roles on current user is a string + +Alternatively, if you define the roles as a string, you can remove the code that does checks against Arrays + +```diff title="api/src/lib/auth.ts" +export const hasRole = (roles: AllowedRoles): boolean => { + if (!isAuthenticated()) { + return false + } + + const currentUserRoles = context.currentUser?.roles + + if (typeof roles === 'string') { +- if (typeof currentUserRoles === 'string') { + return currentUserRoles === roles +- } + } + + if (Array.isArray(roles)) { +- if (Array.isArray(currentUserRoles)) { +- return currentUserRoles?.some((allowedRole) => +- roles.includes(allowedRole) +- ) +- } else if (typeof currentUserRoles === 'string') { + // roles to check is an array, currentUser.roles is a string + return roles.some((allowedRole) => currentUserRoles === allowedRole) +- } + } + + // roles not found + return false +} +``` + +#### C. Roles on current user is an Array of strings + +If in your User model, roles are an array of strings, and can never be just a string, you can safely remove most of the code + +```diff title="api/src/lib/auth.ts" +export const hasRole = (roles: AllowedRoles): boolean => { + if (!isAuthenticated()) { + return false + } + + const currentUserRoles = context.currentUser?.roles + + if (typeof roles === 'string') { +- if (typeof currentUserRoles === 'string') { +- return currentUserRoles === roles +- } else if (Array.isArray(currentUserRoles)) { + // roles to check is a string, currentUser.roles is an array + return currentUserRoles?.some((allowedRole) => roles === allowedRole) +- } + } + + if (Array.isArray(roles)) { +- if (Array.isArray(currentUserRoles)) { + return currentUserRoles?.some((allowedRole) => + roles.includes(allowedRole) + ) +- } else if (typeof currentUserRoles === 'string') { +- return roles.some( +- (allowedRole) => currentUserRoles === allowedRole +- ) + } + } + + // roles not found + return false +} +``` +</details> + +### `getCurrentUser` in `api/src/lib/auth.ts` + +Depending on your auth provider—i.e., anything but dbAuth—because it could change based on your account settings (if you include roles or other metadata), we can't know the shape of your decoded token at setup time. +So you'll have to make sure that the `getCurrentUser` function is typed. + +To help you get started, the comments above the `getCurrentUser` function describe its parameters' types. We recommend typing `decoded` without using imported types from Redwood, as this may be a little too generic! 
+ +```ts title='api/src/lib/auth.ts' +import type { AuthContextPayload } from '@redwoodjs/api' + +// Example 1: typing directly +export const getCurrentUser: CurrentUserFunc = async ( + decoded: { id: string, name: string }, + { token, type }: { token: string, type: string }, +) => { + // ... +} + +// Example 2: Using AuthContextPayload +export const getCurrentUser: CurrentUserFunc = async ( + decoded: { id: string, name: string }, + { token, type }: AuthContextPayload[1], + { event, context }: AuthContextPayload[2] +) => { + // ... +} +``` diff --git a/docs/versioned_docs/version-7.0/typescript/utility-types.md b/docs/versioned_docs/version-7.0/typescript/utility-types.md new file mode 100644 index 000000000000..a01895f1adfd --- /dev/null +++ b/docs/versioned_docs/version-7.0/typescript/utility-types.md @@ -0,0 +1,286 @@ +--- +description: Utility types exposed by Redwood +--- + +# Redwood Utility Types + +Besides generating types for you, Redwood exposes a handful of utility types for Cells, Scenarios, and DbAuth. +You'll see these helpers quite often if you use the generators, so let's walk through some of them. By the end of this, you'll likely see a pattern in these types and their use of [Generics](https://www.typescriptlang.org/docs/handbook/2/generics.html). + +## Cells + +Cells created using the generators come with all the types your normally need, including the `CellSuccessProps`, `CellFailureProps`, and `CellLoadingProps` utility types. + +### `CellSuccessProps<TData, TVariables>` + +This is used to type the props of your Cell's `Success` component. +It takes two arguments as generics: + +| Generic | Description | +|:-------------|:-----------------------------------------------------------------------------------------| +| `TData` | The type of data you're expecting to receive (usually the type generated from the query) | +| `TVariables` | An optional second parameter for the type of the query's variables | + +Not only does `CellSuccessProps` type the data returned from the query, but it also types the variables and methods returned by Apollo Client's `useQuery` hook! + +```ts title="web/src/components/BlogPostCell.tsx" +import type { FindBlogPostQuery, FindBlogPostQueryVariables } from 'types/graphql' + +// highlight-next-line +import type { CellSuccessProps } from '@redwoodjs/web' + +// ... + +// highlight-next-line +type SuccessProps = CellSuccessProps<FindBlogPostQuery, FindBlogPostQueryVariables> + +export const Success = ({ + blogPost, // From the query. This is typed of course + queryResult // 👈 From Apollo Client. This is typed too! +// highlight-next-line +}: SuccessProps) => { + // ... +} +``` + +### `CellFailureProps<TVariables>` + +This gives you the types of the props in your Cell's `Failure` component. +It takes `TVariables` as an optional generic parameter, which is useful if you want to print error messages like `"Couldn't load data for ${variables.searchTerm}"`: + +```ts title="web/src/components/BlogPostCell.tsx" +import type { FindBlogPostQuery, FindBlogPostQueryVariables } from 'types/graphql' + +// highlight-next-line +import type { CellFailureProps } from '@redwoodjs/web' + +// ... + +export const Failure = ({ + error, + variables // 👈 Variables is typed based on the generic + // highlight-next-line +}: CellFailureProps<FindBlogPostQueryVariables>) => ( + // ... 
+) +``` + +### `CellLoadingProps<TVariables>` + +Similar to `CellFailureProps`, but for the props of your Cell's `Loading` component: + +```ts title="web/src/components/BlogPostCell.tsx" +import type { FindBlogPostQuery, FindBlogPostQueryVariables } from 'types/graphql' + +// highlight-next-line +import type { CellLoadingProps } from '@redwoodjs/web' + +// ... + +// highlight-next-line +export const Loading = (props: CellLoadingProps<FindBlogPostQueryVariables>) => ( + <div>Loading...</div> +) +``` + +## Scenarios & Testing + +Over on the api side, when you generate SDLs and Services, Redwood generates tests and scenarios with all the types required. Let's take a deeper look at scenario types. + +### `defineScenario` + +This is actually a function, not a type, but it takes a lot of generics. Use as many or as few as you find helpful. + +``` +defineScenario<PrismaCreateType, TName, TKey> +``` + +| Generic | Description | +|:-------------------|:------------------------------------------------------------------------------------------------------| +| `PrismaCreateType` | (Optional) the type imported from Prisma's create operation that goes into the "data" key | +| `TName` | (Optional) the name or names of the models in your scenario | +| `TKeys` | (Optional) the key(s) in your scenario. These are really only useful while you write out the scenario | + +An example: + +```ts title="posts.scenarios.ts" +import type { Prisma, Post } from '@prisma/client' + +export const standard = defineScenario<Prisma.PostCreateArgs, 'post', 'one'>({ + //👇 TName + post: { + // 👇 TKey + one: { + // 👇 PrismaCreateType. Notice how we import the type from @prisma/client + data: { title: 'String', body: 'String', metadata: { foo: 'bar' } }, + }, + }, +}) +``` + +If you have more than one model in a single scenario, you can use unions: + +```ts +defineScenario<Prisma.PostCreateArgs | Prisma.UserCreateArgs, 'post' | 'user'> +``` + +### `ScenarioData<TModel, TName, TKeys>` + +This utility type makes it easy for you to access data created by your scenarios in your tests. +It takes three generic parameters: + +| Generic | Description | +|:--------|:---------------------------------------------------------------------------------| +| `TData` | The Prisma model that'll be returned | +| `TName` | (Optional) the name of the model. ("post" in the example below) | +| `TKeys` | (optional) the key(s) used to define the scenario. ("one" in the example below) | + +We know this is a lot of generics, but that's so you get to choose how specific you want to be with the types! + +```ts title="api/src/services/posts/posts.scenario.ts" +import type { Post } from '@prisma/client' + +//... + +export type StandardScenario = ScenarioData<Post, 'post'> +``` + +```ts title="api/src/services/posts/posts.test.ts" +import type { StandardScenario } from './posts.scenarios' + +scenario('returns a single post', async (scenario: StandardScenario) => { + const result = await post({ id: scenario.post.one.id }) +}) +``` + +You can of course just define the type in the test file instead of importing it. Just be aware that if you change your scenario, you need to update the type in the test file too! + +## DbAuth + +When you setup dbAuth, the generated files in `api/src/lib/auth.ts` and `api/src/functions/auth.ts` have all the types you need. Let's break down some of the utility types. 
+ +### `DbAuthSession` + +You'll notice an import at the top of `api/src/lib/auth.ts`: + +```ts title="api/src/lib/auth.ts" +import type { DbAuthSession } from '@redwoodjs/api' +``` + +`DbAuthSession` is a utility type that's used to type the argument to `getCurrentUser`, `session`: + +```ts title="api/src/lib/auth.ts" +export const getCurrentUser = async (session: DbAuthSession<number>) => { + return await db.user.findUnique({ + where: { id: session.id }, + select: { id: true }, + }) +} +``` + +The generic it takes should be the type of your User model's `id` field. +It's usually a `string` or a `number`, but it depends on how you've defined it. + +Because a session only ever contains `id`, all we're doing here is defining the type of `id`. + +### `DbAuthHandlerOptions` + +`DbAuthHandlerOptions` gives you access to all the types you need to configure your dbAuth handler function in `api/src/function/auth.ts`. +It also takes a generic, `TUser`—the type of your User model. Note that this is not the same type as `CurrentUser`. + +You can import the type of the User model directly from Prisma and pass it to `DbAuthHandlerOptions`: + +```ts +import type { User as PrismaUser } from '@prisma/client' + +import type { DbAuthHandlerOptions } from '@redwoodjs/api' + +export const handler = async ( + event: APIGatewayProxyEvent, + context: Context +) => { + // Pass in the generic to the type here 👇 + const forgotPasswordOptions: DbAuthHandlerOptions<PrismaUser>['forgotPassword'] = { + + // ... + + // Now in the handler function, `user` will be typed + handler: (user) => { + return user + }, + + // ... + + } + + // ... + +} +``` + +Note that in strict mode, you'll likely see errors where the handlers expect "truthy" values. All you have to do is make sure you return a boolean. For example, `return !!user` instead of `return user`. + +## Directives + + +### `ValidatorDirectiveFunc` +When you generate a [validator directive](directives.md#validators) you will see your `validate` function typed already with `ValidatorDirectiveFunc<TDirectiveArgs>` + +```ts +import { + createValidatorDirective, + // highlight-next-line + ValidatorDirectiveFunc, +} from '@redwoodjs/graphql-server' + +export const schema = gql` + directive @myValidator on FIELD_DEFINITION +` +// 👇 makes sure "context" and directive args are typed +// highlight-next-line +const validate: ValidatorDirectiveFunc = ({ context, directiveArgs }) => { +``` + +This type takes a single generic - the type of your `directiveArgs`. + +Let's take a look at the built-in `@requireAuth(roles: ["ADMIN"])` directive, for example - which we ship with your Redwood app by default in `./api/src/directives/requireAuth/requireAuth.ts` + +```ts +// highlight-next-line +type RequireAuthValidate = ValidatorDirectiveFunc<{ roles?: string[] }> + +const validate: RequireAuthValidate = ({ directiveArgs }) => { + // roles 👇 will be typed correctly as string[] | undefined + // highlight-next-line + const { roles } = directiveArgs + // .... +} +``` + +| Generic | Description | +|:-----------------|:----------------------------------------------------------| +| `TDirectiveArgs` | The type of arguments passed to your directive in the SDL | + +### `TransformerDirectiveFunc` +When you generate a [transformer directive](directives.md#transformers) you will see your `transform` function typed with `TransformDirectiveFunc<TField, TDirectiveArgs>`. 
+ +```ts +// 👇 makes sure the functions' arguments are typed +// highlight-next-line +const transform: TransformerDirectiveFunc = ({ context, resolvedValue }) => { +``` + +This type takes two generics - the type of the field you are transforming, and the type of your `directiveArgs`. + +So for example, let's say you have a transformer directive `@maskedEmail(permittedRoles: ['ADMIN'])` that you apply to `String` fields. You would pass in the following types + +```ts +type MaskedEmailTransform = TransformerDirectiveFunc<string, {permittedRoles?: string[]}> +``` + +| Generic | Description | +|:-----------------|:-------------------------------------------------------------------------------| +| `TField` | This will type `resolvedValue` i.e. the type of the field you are transforming | +| `TDirectiveArgs` | The type of arguments passed to your directive in the SDL | + + diff --git a/docs/versioned_docs/version-7.0/vite-configuration.md b/docs/versioned_docs/version-7.0/vite-configuration.md new file mode 100644 index 000000000000..ce9df4b866f2 --- /dev/null +++ b/docs/versioned_docs/version-7.0/vite-configuration.md @@ -0,0 +1,72 @@ +--- +description: If you have to configure Vite, here's how +--- + +# Vite Configuration + +Redwood uses Vite. One of Redwood's tenets is convention over configuration. + +Vite is an awesome build tool, but we don't want it to be something that you have to be familiar with to be productive. +So it's worth repeating that you don't have to do any of this, because we configure everything you will need out of the box with a Redwood Vite plugin. + +Regardless, there'll probably come a time when you have to configure Vite. All the Vite configuration for your web side sits in `web/vite.config.{js,ts}`, and can be configured the same as any other Vite project. Let's take a peek! + +```js +import dns from 'dns'; +import { defineConfig } from 'vite'; +import redwood from '@redwoodjs/vite'; + +dns.setDefaultResultOrder('verbatim'); + +const viteConfig = { + plugins: [ + // 👇 this is the RedwoodJS Vite plugin, that houses all the default configuration + redwood() + // ... add any custom Vite plugins you would like here + ], + // You can override built in configuration like server, optimizeDeps, etc. here +}; +export default defineConfig(viteConfig); + +``` + +Checkout Vite's docs on [configuration](https://vitejs.dev/config/) + + +### Sass and Tailwind CSS + +Redwood is already configured to use Sass, if the packages are there: + +``` +yarn workspace web add -D sass sass-loader +``` + +And if you want to use Tailwind CSS, just run the setup command: + +``` +yarn rw setup ui tailwindcss +``` + +## Vite Dev Server + +Redwood uses Vite's preview server for local development. +When you run `yarn rw dev`, keys in your `redwood.toml`'s `[web]` table—like `port` and `apiUrl`—are used as vite preview server options (in this case, [preview.port](https://vitejs.dev/config/preview-options.html#preview-port) and [preview.proxy](https://vitejs.dev/config/preview-options.html#preview-proxy) respectively). + +> You can peek at all the out-of-the-box configuration for your Vite preview server in the [RedwoodJS Vite plugin](https://github.com/redwoodjs/redwood/blob/main/packages/vite/src/index.ts) + +### Using `--forward` + +While you can configure Vite using `web/vite.config.js`, it's often simpler to use `yarn rw dev`'s `--forward` option. 
+ +For example, if you want to force optimise your Vite dependencies again, you can run: + +``` +yarn rw dev --fwd="--force" +``` + +You can also use `--forward` to override keys in your `redwood.toml`. +For example, the following starts your app on port `1234` and disables automatic browser opening: + +``` +yarn rw dev --forward="--port 1234 --no-open" +``` diff --git a/docs/versioned_docs/version-7.0/webhooks.md b/docs/versioned_docs/version-7.0/webhooks.md new file mode 100644 index 000000000000..a185c7ebc4fa --- /dev/null +++ b/docs/versioned_docs/version-7.0/webhooks.md @@ -0,0 +1,815 @@ +--- +description: Securely integrate third-party services +--- + +# Webhooks + +If you've used [IFTTT](https://ifttt.com/maker_webhooks), [Pipedream](https://pipedream.com/docs/api/rest/webhooks/), or [Zapier](https://zapier.com/apps/webhook/integrations) then you're familiar with how webhooks can give your app the power to create complex workflows, build one-to-one automation, and sync data between apps. RedwoodJS helps you work with webhooks by giving you the tools to both receive and verify incoming webhooks and sign outgoing ones with ease. + +## What is a webhook + +Simply put, webhooks are a common way that third-party services notify your RedwoodJS application when an event of interest happens. They are a form of messaging and automation allowing distinct web applications to communicate with each other and send real-time data from one application to another whenever a given event occurs. + +The third-party considers these "outgoing Webhooks" and therefore your application receives "incoming Webhooks". + +When the api side of your Redwood app receives a webhook, it can parse it, process it, save it to replay later, or any other action needed. + +Webhooks are different from other integration methods in that the third-party pushes new events to your app instead of your app constantly pulling or polling for new data. + +### Examples of Webhooks + +Some examples of outgoing Webhooks are: + +- Netlify successfully [deploys a site](https://docs.netlify.com/site-deploys/notifications/#outgoing-webhooks) +- Someone [pushes a PR to GitHub](https://docs.github.com/en/developers/webhooks-and-events/creating-webhooks) +- Someone [posts in Discourse](https://meta.discourse.org/t/setting-up-webhooks/49045) +- Stripe [completes a purchase](https://stripe.com/docs/webhooks) +- A cron/scheduled task wants to invoke a long running [background function on Netlify](https://docs.netlify.com/functions/background-functions/) +- and more webhook integrations via services like [IFTTT](https://ifttt.com/maker_webhooks), [Pipedream](https://pipedream.com/docs/api/rest/webhooks/) and [Zapier](https://zapier.com/apps/webhook/integrations) + +If you were to subscribe to one of these webhooks, you'd point it to an endpoint in your RedwoodJS api -- ie, a serverless function. But, because that function is out "in the cloud" you need to ensure that these run **only when they should**. That means your function must: + +- verify it comes from the place you expect +- trust the party +- know the payload sent in the hook hasn't been tampered with +- ensure that the hook isn't reprocessed or replayed (sometimes) + +That is, you need to **verify your incoming webhooks**. 
+
+## Verifying Webhooks with RedwoodJS Made Easy
+
+The RedwoodJS [`api/webhooks` package](https://github.com/redwoodjs/redwood/blob/main/packages/api/src/webhooks/index.ts) makes it easy to receive and verify incoming webhooks by implementing many of the most commonly used webhook signature verifiers.
+
+### Webhook Verification
+
+Webhooks have a few ways of letting you know they should be trusted. The most common is by sending along a "signature" header. They typically sign their payload with a secret key (in one of a few ways) and expect you to validate the signature before processing it.
+
+### Webhook Signature Verifiers
+
+Common signature verification methods are:
+
+- SHA256 ([GitHub](https://docs.github.com/en/developers/webhooks-and-events/securing-your-webhooks#validating-payloads-from-github) and [Discourse](https://meta.discourse.org/t/setting-up-webhooks/49045))
+- Base64 SHA256 ([Svix](https://docs.svix.com/receiving/verifying-payloads/how-manual) and [Clerk](https://docs.clerk.dev/reference/webhooks#verifying-requests))
+- SHA1 ([Vercel](https://vercel.com/docs/integrations?query=webhook%20sha1#webhooks/securing-webhooks))
+- JWT ([Netlify](https://docs.netlify.com/site-deploys/notifications/#outgoing-webhooks))
+- Timestamp Scheme ([Stripe](https://stripe.com/docs/webhooks/best-practices) / Redwood default)
+- Secret Key (Custom, [Orbit](https://docs.orbit.love/docs/webhooks))
+
+RedwoodJS also adds a way to skip verification altogether, for testing or in case your third party doesn't sign the payload:
+
+- SkipVerifier (bypass verification, or no verification)
+
+RedwoodJS implements [signatureVerifiers](https://github.com/redwoodjs/redwood/tree/main/packages/api/src/auth/verifiers) for each of these so you can get started integrating your app with third parties right away.
+
+```jsx
+export type SupportedVerifiers =
+  | SkipVerifier
+  | SecretKeyVerifier
+  | Sha1Verifier
+  | Sha256Verifier
+  | Base64Sha1Verifier
+  | Base64Sha256Verifier
+  | TimestampSchemeVerifier
+  | JwtVerifier
+```
+
+Each `SupportedVerifier` implements a method to `sign` and `verify` a payload with a secret (if needed).
+
+When the webhook functions need to [create a verifier](https://github.com/redwoodjs/redwood/blob/main/packages/api/src/auth/verifiers/index.ts#L12) in order to `verifyEvent`, `verifySignature`, or `signPayload`, they do so via:
+
+```jsx
+createVerifier(type, options)
+```
+
+where `type` is one of the supported verifiers and `VerifyOptions` sets the
+options the verifier needs to sign or verify.
+
+```jsx
+/**
+ * VerifyOptions
+ *
+ * Used when verifying a signature based on the verifier's requirements
+ *
+ * @param {string} signatureHeader - Optional header that contains the signature
+ * to verify. Will default to DEFAULT_WEBHOOK_SIGNATURE_HEADER
+ * @param {(signature: string) => string} signatureTransformer - Optional
+ * function that receives the signature from the headers and returns a new
+ * signature to use in the Verifier
+ * @param {number} currentTimestampOverride - Optional timestamp to use as the
+ * "current" timestamp, in msec
+ * @param {number} eventTimestamp - Optional timestamp to use as the event
+ * timestamp, in msec. If this is provided, the webhook verification will fail
+ * if the eventTimestamp is too far from the current time (or the time passed
+ * as the `currentTimestampOverride` option)
+ * @param {number} tolerance - Optional tolerance in msec
+ * @param {string} issuer - Optional JWT issuer for the JwtVerifier
+ */
+export interface VerifyOptions {
+  signatureHeader?: string
+  signatureTransformer?: (signature: string) => string
+  currentTimestampOverride?: number
+  eventTimestamp?: number
+  tolerance?: number
+  issuer?: string
+}
+```
+
+## How to Receive and Verify an Incoming Webhook
+
+The `api/webhooks` package exports [verifyEvent and verifySignature](https://github.com/redwoodjs/redwood/blob/main/packages/api/src/webhooks/index.ts) to apply [verification methods](https://github.com/redwoodjs/redwood/tree/main/packages/api/src/auth/verifiers) and verify the event or some portion of the event payload with a signature as defined in its [VerifyOptions](https://github.com/redwoodjs/redwood/blob/main/packages/api/src/webhooks/common.ts).
+If the signature fails verification, a `WebhookVerificationError` is raised, which can be caught to return a `401` Unauthorized.
+
+Typically, for each integration you'll define 1) the events that trigger the webhook (or the cron schedule/conditions for sending it), 2) a secret, and 3) the endpoint to send the webhook to (i.e., your endpoint).
+
+When the third party creates the outgoing webhook payload, they'll sign it (typically the event request body) and add that signature to the request headers under some key.
+
+When your endpoint receives the request (the incoming webhook), it can extract the signature using the signature header key set in `VerifyOptions`, transform it using the `signatureTransformer` function also defined in `VerifyOptions`, use the appropriate verifier, and validate the payload to ensure it comes from a trusted source.
+
+Note that:
+
+- `verifyEvent` will detect if the event body is base64 encoded, then decode and validate the payload with the signature verifier
+- the `signatureHeader` specified in `VerifyOptions` will be converted to lowercase when fetching the signature from the event headers
+
+You can then use the payload data with confidence in your function.
+
+### SHA256 Verifier (used by GitHub, Discourse)
+
+SHA256 HMAC is one of the most popular signatures. It's used by [Discourse](https://meta.discourse.org/t/setting-up-webhooks/49045) and [GitHub](https://docs.github.com/en/developers/webhooks-and-events/securing-your-webhooks#validating-payloads-from-github).
+
+When your secret token is set, GitHub uses it to create a hash signature with each payload. This hash signature is included with the headers of each request as `X-Hub-Signature-256`.
+
+For Discourse, when an event is triggered, it `POST`s a webhook with `X-Discourse-Event-Signature` in the HTTP header to your endpoint. It's computed with SHA256.
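+
+The full handler below is written for Discourse, but the same `sha256Verifier` works for GitHub. Assuming the only things you change are the signature header and the (placeholder) env var holding your secret, the verification step might look like:
+
+```ts
+import type { APIGatewayEvent } from 'aws-lambda'
+
+import { verifyEvent, VerifyOptions } from '@redwoodjs/api/webhooks'
+
+export const handler = async (event: APIGatewayEvent) => {
+  // GitHub sends its signature in the X-Hub-Signature-256 header
+  const options: VerifyOptions = {
+    signatureHeader: 'X-Hub-Signature-256',
+  }
+
+  verifyEvent('sha256Verifier', {
+    event,
+    secret: process.env.GITHUB_WEBHOOK_SECRET, // placeholder name; use whatever you configured
+    options,
+  })
+
+  // ...handle the verified payload, as in the Discourse example below
+
+  return { statusCode: 200 }
+}
+```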
+ +```jsx +import type { APIGatewayEvent } from 'aws-lambda' +import { + verifyEvent, + VerifyOptions, + WebhookVerificationError, +} from '@redwoodjs/api/webhooks' + +import { logger } from 'src/lib/logger' + +export const handler = async (event: APIGatewayEvent) => { + const discourseInfo = { webhook: 'discourse' } + const webhookLogger = logger.child({ discourseInfo }) + + webhookLogger.trace('Invoked discourseWebhook function') + + try { + const options = { + signatureHeader: 'X-Discourse-Event-Signature', + } as VerifyOptions + + verifyEvent('sha256Verifier', { + event, + secret: process.env.DISCOURSE_WEBHOOK_SECRET, + options, + }) + + webhookLogger.debug({ headers: event.headers }, 'Headers') + + const payload = JSON.parse(event.body) + + webhookLogger.debug({ payload }, 'Body payload') + + // Safely use the validated webhook payload + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 200, + body: JSON.stringify({ + data: payload, + }), + } + } catch (error) { + if (error instanceof WebhookVerificationError) { + webhookLogger.warn('Unauthorized') + + return { + statusCode: 401, + } + } else { + webhookLogger.error({ error }, error.message) + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 500, + body: JSON.stringify({ + error: error.message, + }), + } + } + } +} +``` + +### Base64 SHA256 Verifier (used by Svix, Clerk) + +This is a variation on the SHA256 HMAC verification that works with binary buffers encoded with base64. It's used by [Svix](https://docs.svix.com/receiving/verifying-payloads/how-manual) and [Clerk](https://docs.clerk.dev/reference/webhooks#verifying-requests). + +Svix (and by extension, Clerk) gives you a secret token that it uses to create a hash signature with each payload. This hash signature is included with the headers of each request as `svix-signature`. + +> Some production environments, like Vercel, might base64 encode the request body string. In that case, the body must be conditionally parsed. +> ```js +> export const handler = async (event: APIGatewayEvent) => { +> const body = event.isBase64Encoded +> ? Buffer.from(event.body, 'base64').toString('utf-8') +> : event.body +> ``` + +```tsx +import type { APIGatewayEvent } from 'aws-lambda' +import { + verifyEvent, + VerifyOptions, + WebhookVerificationError, +} from '@redwoodjs/api/webhooks' + +import { logger } from 'src/lib/logger' + +export const handler = async (event: APIGatewayEvent) => { + const clerkInfo = { webhook: 'clerk' } + const webhookLogger = logger.child({ clerkInfo }) + + webhookLogger.trace('Invoked clerkWebhook function') + + try { + const options: VerifyOptions = { + signatureHeader: 'svix-signature', + signatureTransformer: (signature: string) => { + // Clerk can pass a space separated list of signatures. 
+        // Let's just use the first one that's of version 1
+        const passedSignatures = signature.split(' ')
+
+        for (const versionedSignature of passedSignatures) {
+          const [version, signature] = versionedSignature.split(',')
+
+          if (version === 'v1') {
+            return signature
+          }
+        }
+      },
+    }
+
+    const svix_id = event.headers['svix-id']
+    const svix_timestamp = event.headers['svix-timestamp']
+
+    verifyEvent('base64Sha256Verifier', {
+      event,
+      secret: process.env.CLERK_WH_SECRET.slice(6),
+      payload: `${svix_id}.${svix_timestamp}.${event.body}`,
+      options,
+    })
+
+    webhookLogger.debug({ headers: event.headers }, 'Headers')
+
+    const payload = JSON.parse(event.body)
+
+    webhookLogger.debug({ payload }, 'Body payload')
+
+    // Safely use the validated webhook payload
+
+    return {
+      headers: {
+        'Content-Type': 'application/json',
+      },
+      statusCode: 200,
+      body: JSON.stringify({
+        data: payload,
+      }),
+    }
+  } catch (error) {
+    if (error instanceof WebhookVerificationError) {
+      webhookLogger.warn('Unauthorized')
+
+      return {
+        statusCode: 401,
+      }
+    } else {
+      webhookLogger.error({ error }, error.message)
+
+      return {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+        statusCode: 500,
+        body: JSON.stringify({
+          error: error.message,
+        }),
+      }
+    }
+  }
+}
+```
+
+### SHA1 Verifier (used by Vercel)
+
+- [Vercel](https://vercel.com/docs/integrations?query=webhook%20sha1#webhooks/securing-webhooks)
+
+Vercel signs its webhooks with SHA1 and also base64 encodes the event.
+
+RedwoodJS's `verifyEvent` will detect if the event is base64 encoded, decode it, and then validate the payload with the signature.
+
+```jsx
+import type { APIGatewayEvent } from 'aws-lambda'
+import {
+  verifyEvent,
+  VerifyOptions,
+  WebhookVerificationError,
+} from '@redwoodjs/api/webhooks'
+
+import { logger } from 'src/lib/logger'
+
+export const handler = async (event: APIGatewayEvent) => {
+  const vercelInfo = { webhook: 'vercel' }
+  const webhookLogger = logger.child({ vercelInfo })
+
+  webhookLogger.trace('Invoked vercelWebhook function')
+
+  try {
+    const options = {
+      signatureHeader: 'x-vercel-signature',
+    } as VerifyOptions
+
+    verifyEvent('sha1Verifier', {
+      event,
+      secret: process.env.VERCEL_WEBHOOK_SECRET,
+      options,
+    })
+
+    webhookLogger.debug({ headers: event.headers }, 'Headers')
+
+    const payload = JSON.parse(event.body)
+
+    webhookLogger.debug({ payload }, 'Body payload')
+
+    // Safely use the validated webhook payload
+
+    return {
+      headers: {
+        'Content-Type': 'application/json',
+      },
+      statusCode: 200,
+      body: JSON.stringify({
+        data: payload,
+      }),
+    }
+  } catch (error) {
+    if (error instanceof WebhookVerificationError) {
+      webhookLogger.warn('Unauthorized')
+
+      return {
+        statusCode: 401,
+      }
+    } else {
+      webhookLogger.error({ error }, error.message)
+
+      return {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+        statusCode: 500,
+        body: JSON.stringify({
+          error: error.message,
+        }),
+      }
+    }
+  }
+}
+```
+
+### TimestampScheme Verifier (used by Stripe)
+
+The TimestampScheme verifier not only signs the payload with a secret (SHA256), but also includes a timestamp to prevent [replay attacks](https://en.wikipedia.org/wiki/Replay_attack) and a scheme (i.e., a version) to further protect webhooks.
+
+A replay attack is when an attacker intercepts a valid payload and its signature, then re-transmits them. To mitigate such attacks, third parties like Stripe include a timestamp in the `Stripe-Signature` header.
Because this timestamp is part of the signed payload, it is also verified by the signature, so an attacker cannot change the timestamp without invalidating the signature. If the signature is valid but the timestamp is too old, you can have your application reject the payload. + +When verifying, there is a default tolerance of five minutes between the event timestamp and the current time but you can override this default by setting the [`tolerance` option](https://github.com/redwoodjs/redwood/blob/main/packages/api/src/auth/verifiers/timestampSchemeVerifier.ts) in the `VerifyOptions` passed to the verifier to another value (in milliseconds). + +Also, if for some reason you need to adjust the timestamp used to compare the tolerance to a different time (say in the past), then you may override this by setting the [`currentTimestampOverride` option](https://github.com/redwoodjs/redwood/blob/main/packages/api/src/auth/verifiers/timestampSchemeVerifier.ts) in the `VerifyOptions` passed to the verifier. + +- [Stripe](https://stripe.com/docs/webhooks/best-practices) +- Used in a Cron Job that triggers a Webhook periodically to background task via a serverless function + +The TimestampScheme is particularly useful when used with cron jobs because if for some reason the webhook is delayed between when it is created and sent/received your app can discard it and thus old information would not risk overwriting newer data. + +```jsx +import type { APIGatewayEvent } from 'aws-lambda' + +import { + verifyEvent, + VerifyOptions, + WebhookVerificationError, +} from '@redwoodjs/api/webhooks' +import { logger } from 'src/lib/logger' +import { perform } from 'src/lib/orbit/jobs/loadActivitiesJob' + +/** + * The handler function is your code that processes http request events. + * You can use return and throw to send a response or error, respectively. + * + * @typedef { import('aws-lambda').APIGatewayEvent } APIGatewayEvent + * @typedef { import('aws-lambda').Context } Context + * @param { APIGatewayEvent } event - an object which contains information from the invoker. + * @param { Context } context - contains information about the invocation, + * function, and execution environment. 
+ */
+export const handler = async (event: APIGatewayEvent) => {
+  const webhookInfo = { webhook: 'loadOrbitActivities-background' }
+
+  const webhookLogger = logger.child({ webhookInfo })
+
+  webhookLogger.trace('>> in loadOrbitActivities-background')
+
+  try {
+    const options = {
+      signatureHeader: 'RW-Webhook-Signature',
+      // You may override these defaults
+      // tolerance: 60_000,
+      // timestamp: new Date().getDate() - 1,
+    } as VerifyOptions
+
+    verifyEvent('timestampSchemeVerifier', {
+      event,
+      secret: process.env.WEBHOOK_SECRET,
+      options,
+    })
+
+    await perform()
+
+    return {
+      headers: {
+        'Content-Type': 'application/json',
+      },
+      statusCode: 200,
+      body: JSON.stringify({
+        data: `loadOrbitActivities scheduled job invoked at ${Date.now()}`,
+      }),
+    }
+  } catch (error) {
+    if (error instanceof WebhookVerificationError) {
+      webhookLogger.warn(
+        { webhook: 'loadOrbitActivities-background' },
+        'Unauthorized'
+      )
+      return {
+        statusCode: 401,
+      }
+    } else {
+      webhookLogger.error(
+        { webhook: 'loadOrbitActivities-background', error },
+        error.message
+      )
+      return {
+        headers: {
+          'Content-Type': 'application/json',
+        },
+        statusCode: 500,
+        body: JSON.stringify({
+          error: error.message,
+        }),
+      }
+    }
+  }
+}
+```
+
+### JWT Signature (used by Netlify)
+
+- [Netlify Outgoing Webhooks](https://docs.netlify.com/site-deploys/notifications/#outgoing-webhooks)
+
+The JSON Web Token (JWT) verifier not only cryptographically compares the signature to the payload to ensure it hasn't been tampered with, but also gives you the added JWT claims like `issuer` and `expires`, so you can trust that the webhook was sent by a trusted source and isn't out of date.
+
+Here, the `VerifyOptions` not only specify the expected signature header, but also check that the `iss` claim is `netlify`.
+
+```jsx
+  const options = {
+    signatureHeader: 'X-Webhook-Signature',
+    issuer: 'netlify',
+  } as VerifyOptions
+```
+
+See [Introduction to JSON Web Tokens](https://jwt.io/introduction) for more information.
+
+```jsx
+import type { APIGatewayEvent } from 'aws-lambda'
+import {
+  verifyEvent,
+  VerifyOptions,
+  WebhookVerificationError,
+} from '@redwoodjs/api/webhooks'
+
+import { logger } from 'src/lib/logger'
+
+/**
+ * The handler function is your code that processes http request events.
+ * You can use return and throw to send a response or error, respectively.
+ *
+ * @typedef { import('aws-lambda').APIGatewayEvent } APIGatewayEvent
+ * @typedef { import('aws-lambda').Context } Context
+ * @param { APIGatewayEvent } event - an object which contains information from the invoker.
+ * @param { Context } context - contains information about the invocation,
+ * function, and execution environment.
+ */ +export const handler = async (event: APIGatewayEvent) => { + const netlifyInfo = { + webhook: 'verifyNetlifyWebhook', + headers: event.headers['x-netlify-event'], + } + const webhookLogger = logger.child({ netlifyInfo }) + + try { + webhookLogger.debug('Received Netlify event') + + const options = { + signatureHeader: 'X-Webhook-Signature', + issuer: 'netlify', + } as VerifyOptions + + verifyEvent('jwtVerifier', { + event, + secret: process.env.NETLIFY_DEPLOY_WEBHOOK_SECRET, + options, + }) + const payload = JSON.parse(event.body) + + // Safely use the validated webhook payload + + webhookLogger.debug({ payload }, 'Now I can do things with the payload') + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 200, + body: JSON.stringify({ + data: payload, + }), + } + } catch (error) { + if (error instanceof WebhookVerificationError) { + webhookLogger.warn('Unauthorized') + return { + statusCode: 401, + } + } else { + webhookLogger.error({ error }, error.message) + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 500, + body: JSON.stringify({ + error: error.message, + }), + } + } + } +} +``` + +### Secret Key Verifier (used by Orbit) + +- [Orbit Webhook Doc](https://docs.orbit.love/docs/webhooks) + +The Secret Key verifiers used by [Orbit](https://docs.orbit.love/docs/webhooks) acts very much like a password. It doesn't perform some cryptographic comparison of the signature with the payload received, but rather simple checks if the expected key or token is present. + +```jsx +//import type { APIGatewayEvent, Context } from 'aws-lambda' +import { + verifyEvent, + // VerifyOptions, + WebhookVerificationError, +} from '@redwoodjs/api/webhooks' + +import { deserialize } from 'deserialize-json-api' +import { parser, persister } from 'src/lib/orbit/loaders/activityLoader' + +import { logger } from 'src/lib/logger' + +const webhookDetails = (event) => { + const webhook = 'orbitWebhook-background' + const orbitEvent = event.headers['x-orbit-event'] || '' + const orbitEventId = event.headers['x-orbit-event-id'] || '' + const orbitEventType = event.headers['x-orbit-event-type'] || '' + const orbitUserAgent = event.headers['user-agent'] || '' + const orbitSignature = event.headers['x-orbit-signature'] || '' + + return { + webhook, + orbitEvent, + orbitEventId, + orbitEventType, + orbitUserAgent, + orbitSignature, + } +} + +/** + * The handler function is your code that processes http request events. + * You can use return and throw to send a response or error, respectively. + * + * Important: When deployed, a custom serverless function is an open API endpoint and + * is your responsibility to secure appropriately. + * + * @see {@link https://redwoodjs.com/docs/serverless-functions#security-considerations|Serverless Function Considerations} + * in the RedwoodJS documentation for more information. + * + * @typedef { import('aws-lambda').APIGatewayEvent } APIGatewayEvent + * @typedef { import('aws-lambda').Context } Context + * @param { APIGatewayEvent } event - an object which contains information from the invoker. + * @param { Context } context - contains information about the invocation, + * function, and execution environment. 
+ */ +export const handler = async (event) => { + const orbitInfo = webhookDetails(event) + + const webhookLogger = logger.child({ orbitInfo }) + + webhookLogger.info(`>> in webhook`) + + try { + const options = { + signatureHeader: 'X-Orbit-Signature', + } + verifyEvent('secretKeyVerifier', { + event, + secret: process.env.ORBIT_WEBHOOK_SECRET, + options, + }) + + if (orbitInfo.orbitEventType === 'activity:created') { + const parsedActivity = parseEventPayload(event) + + // Safely use the validated webhook payload + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 200, + body: JSON.stringify({ + data: 'orbitWebhook done', + }), + } + } else { + webhookLogger.warn(`Unsupported Orbit Event Type: ${orbitInfo.orbitEventType}`) + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 400, + body: JSON.stringify({ + data: `Unsupported Orbit Event Type: ${orbitInfo.orbitEventType}`, + }), + } + } + } catch (error) { + if (error instanceof WebhookVerificationError) { + webhookLogger.warn('Unauthorized') + return { + statusCode: 401, + } + } else { + webhookLogger.error({ error }, error.message) + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 500, + body: JSON.stringify({ + error: error.message, + }), + } + } + } +} +``` + +### Skip Verifier (used by Livestorm) + +[Livestorm](https://support.livestorm.co/article/119-webhooks) sends webhooks but doesn't sign them with a secret. + +Here, you can use the `skipVerifier` -- or choose not to validate altogether, but setting up to `verifyEvent` would let you quickly change the verification method if their changes. + +You can also use the `skipVerifier` in testing or in `dev` so that you needn't share your secrets with other developers. + +In that case, you might set `WEBHOOK_VERIFICATION=skipVerifier` and use the envar in `verifyEvent(process.env.WEBHOOK_VERIFICATION, { event })`. + +```jsx +import type { APIGatewayEvent } from 'aws-lambda' +import { verifyEvent, WebhookVerificationError } from '@redwoodjs/api/webhooks' + +import { logger } from 'src/lib/logger' + +/** + * The handler function is your code that processes http request events. + * You can use return and throw to send a response or error, respectively. + * + * @typedef { import('aws-lambda').APIGatewayEvent } APIGatewayEvent + * @typedef { import('aws-lambda').Context } Context + * @param { APIGatewayEvent } event - an object which contains information from the invoker. + * @param { Context } context - contains information about the invocation, + * function, and execution environment. 
+ */ +export const handler = async (event: APIGatewayEvent) => { + const livestormInfo = { webhook: 'livestorm' } + const webhookLogger = logger.child({ livestormInfo }) + + webhookLogger.trace('Livestorm') + + webhookLogger.debug({ event: event }, 'The Livestorm event') + + // Use the webhook payload + // Note: since the payload is not signed, you may want to validate other header info + + try { + verifyEvent('skipVerifier', { event }) + + const data = JSON.parse(event.body) + + webhookLogger.debug({ payload: data }, 'Data from Livestorm') + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 200, + body: JSON.stringify({ + data, + }), + } + } catch (error) { + if (error instanceof WebhookVerificationError) { + webhookLogger.warn('Unauthorized') + + return { + statusCode: 401, + } + } else { + webhookLogger.error({ error }, error.message) + + return { + headers: { + 'Content-Type': 'application/json', + }, + statusCode: 500, + body: JSON.stringify({ + error: error.message, + }), + } + } + } +} +``` + +## Signing a Payload for an Outgoing Webhook + +To sign a payload for an outgoing webhook, the `api/webhooks` package exports [signPayload](https://github.com/redwoodjs/redwood/blob/main/packages/api/src/webhooks/index.ts), a function that signs a payload using a [verification method](https://github.com/redwoodjs/redwood/tree/main/packages/api/src/auth/verifiers), creating your "webhook signature". Once you have the signature, you can add it to your request's http headers with a name of your choosing, and then post the request to the endpoint: + +```jsx +import got from 'got' +import { signPayload } from '@redwoodjs/api/webhooks' + +const YOUR_OUTGOING_WEBHOOK_DESTINATION_URL = 'https://example.com/receive' +// Name the signature header whatever you like; the receiver just needs to read the same one +const YOUR_WEBHOOK_SIGNATURE_HEADER = 'X-Webhook-Signature' +// The shared secret used to sign the payload, here read from an environment variable +const secret = process.env.WEBHOOK_SECRET + +export const sendOutGoingWebhooks = async ({ payload }) => { + const signature = signPayload('timestampSchemeVerifier', { + payload, + secret, + }) + + await got.post(YOUR_OUTGOING_WEBHOOK_DESTINATION_URL, { + responseType: 'json', + + json: { + payload, + }, + headers: { + [YOUR_WEBHOOK_SIGNATURE_HEADER]: signature, + }, + }) +} +``` + +## How To Test Webhooks + +Because your webhook is typically sent from a third-party's system, manually testing webhooks can be difficult and time-consuming. See [How To Test Webhooks](serverless-functions.md#how-to-test-webhooks) to learn how to write automated tests that help you implement your webhook handler. + +## More Information + +Want to learn more about webhooks? + +- [Webhook.site lets you easily inspect, test and automate (with the visual Custom Actions builder, or WebhookScript) any incoming HTTP request or e-mail.](https://webhook.site/#!/) +- [What is a Webhook](https://simonfredsted.com/1583) by Simon Fredsted +- [About Webhooks](https://docs.github.com/en/developers/webhooks-and-events/about-webhooks) on GitHub +- [What are Webhooks? A simple guide to connecting apps with webhooks](https://zapier.com/blog/what-are-webhooks/) on Zapier +- [What are Webhooks? 
Easy Explanation & Tutorial](https://snipcart.com/blog/what-are-webhooks-explained-example) on Snipcart +- [What are Webhooks and Why You Can’t Afford to Ignore Them](https://www.chargebee.com/blog/what-are-webhooks-explained/) on Chargebee +- [What is a webhook: How they work and how to set them up](https://www.getvero.com/resources/webhooks/) on Vero diff --git a/docs/versioned_sidebars/version-6.0-sidebars.json b/docs/versioned_sidebars/version-6.x-sidebars.json similarity index 99% rename from docs/versioned_sidebars/version-6.0-sidebars.json rename to docs/versioned_sidebars/version-6.x-sidebars.json index 0c1cd6bfca01..f87cb7b22ca3 100644 --- a/docs/versioned_sidebars/version-6.0-sidebars.json +++ b/docs/versioned_sidebars/version-6.x-sidebars.json @@ -195,15 +195,18 @@ ] }, "directives", + "docker", "environment-variables", "forms", "graphql", "local-postgres-setup", "logger", + "mailer", "mocking-graphql-requests", "prerender", "project-configuration-dev-test-build", "redwoodrecord", + "realtime", "router", "schema-relations", "security", diff --git a/docs/versioned_sidebars/version-7.0-sidebars.json b/docs/versioned_sidebars/version-7.0-sidebars.json new file mode 100644 index 000000000000..c19b617d3bff --- /dev/null +++ b/docs/versioned_sidebars/version-7.0-sidebars.json @@ -0,0 +1,322 @@ +{ + "main": [ + "introduction", + "quick-start", + { + "type": "category", + "label": "Tutorial", + "items": [ + { + "type": "doc", + "label": "Foreword", + "id": "tutorial/foreword" + }, + { + "Chapter 0": [ + "tutorial/chapter0/what-is-redwood" + ] + }, + { + "Chapter 1": [ + "tutorial/chapter1/prerequisites", + "tutorial/chapter1/installation", + "tutorial/chapter1/file-structure", + "tutorial/chapter1/first-page", + "tutorial/chapter1/second-page", + "tutorial/chapter1/layouts" + ] + }, + { + "Chapter 2": [ + "tutorial/chapter2/getting-dynamic", + "tutorial/chapter2/cells", + "tutorial/chapter2/side-quest", + "tutorial/chapter2/routing-params" + ] + }, + { + "Chapter 3": [ + "tutorial/chapter3/forms", + "tutorial/chapter3/saving-data" + ] + }, + { + "Chapter 4": [ + "tutorial/chapter4/authentication", + "tutorial/chapter4/deployment" + ] + }, + "tutorial/intermission", + { + "Chapter 5": [ + "tutorial/chapter5/storybook", + "tutorial/chapter5/first-story", + "tutorial/chapter5/testing", + "tutorial/chapter5/first-test" + ] + }, + { + "Chapter 6": [ + "tutorial/chapter6/the-redwood-way", + "tutorial/chapter6/multiple-comments", + "tutorial/chapter6/comments-schema", + "tutorial/chapter6/comment-form" + ] + }, + { + "Chapter 7": [ + "tutorial/chapter7/rbac", + "tutorial/chapter7/api-side-currentuser" + ] + }, + "tutorial/afterword" + ] + }, + { + "type": "category", + "label": "Reference", + "link": { + "type": "generated-index", + "title": "Reference", + "slug": "/index" + }, + "items": [ + "a11y", + "app-configuration-redwood-toml", + "assets-and-files", + { + "type": "category", + "label": "Authentication", + "link": { + "type": "doc", + "id": "authentication" + }, + "items": [ + { + "type": "doc", + "id": "auth/dbauth" + }, + { + "type": "doc", + "id": "auth/auth0" + }, + { + "type": "doc", + "id": "auth/azure" + }, + { + "type": "doc", + "id": "auth/clerk" + }, + { + "type": "doc", + "id": "auth/custom" + }, + { + "type": "doc", + "id": "auth/firebase" + }, + { + "type": "doc", + "id": "auth/netlify" + }, + { + "type": "doc", + "id": "auth/supabase" + }, + { + "type": "doc", + "id": "auth/supertokens" + } + ] + }, + "builds", + "cells", + "cli-commands", + "connection-pooling", 
"contributing-overview", + "contributing-walkthrough", + "cors", + "create-redwood-app", + "custom-web-index", + "data-migrations", + { + "type": "category", + "label": "Deployment", + "link": { + "type": "generated-index", + "title": "Deployment", + "slug": "deployment/index" + }, + "items": [ + { + "type": "doc", + "label": "Introduction", + "id": "deploy/introduction" + }, + { + "type": "doc", + "label": "Baremetal", + "id": "deploy/baremetal" + }, + { + "type": "doc", + "label": "GCP or AWS via Coherence", + "id": "deploy/coherence" + }, + { + "type": "doc", + "label": "AWS via Flightcontrol", + "id": "deploy/flightcontrol" + }, + { + "type": "doc", + "label": "Edgio", + "id": "deploy/edgio" + }, + { + "type": "doc", + "label": "Netlify", + "id": "deploy/netlify" + }, + { + "type": "doc", + "label": "Render", + "id": "deploy/render" + }, + { + "type": "doc", + "label": "Serverless Framework", + "id": "deploy/serverless" + }, + { + "type": "doc", + "label": "Vercel", + "id": "deploy/vercel" + } + ] + }, + "directives", + "docker", + "environment-variables", + "forms", + { + "type": "category", + "label": "GraphQL", + "link": { + "type": "generated-index", + "title": "GraphQL", + "slug": "graphql/index" + }, + "items": [ + { + "type": "doc", + "label": "About", + "id": "graphql" + }, + { + "type": "doc", + "label": "Fragments", + "id": "graphql/fragments" + }, + { + "type": "doc", + "label": "Trusted Documents", + "id": "graphql/trusted-documents" + }, + { + "type": "doc", + "label": "Mocking GraphQL Requests", + "id": "graphql/mocking-graphql-requests" + }, + { + "type": "doc", + "label": "Realtime", + "id": "graphql/realtime" + } + ] + }, + "local-postgres-setup", + "logger", + "mailer", + { + "type": "category", + "label": "Monitoring", + "link": { + "type": "generated-index", + "title": "Monitoring", + "slug": "monitoring/index" + }, + "items": [ + { + "type": "doc", + "label": "Sentry", + "id": "monitoring/sentry" + } + ] + }, + "prerender", + "project-configuration-dev-test-build", + "redwoodrecord", + "realtime", + "router", + "schema-relations", + "security", + "seo-head", + "serverless-functions", + "services", + "storybook", + "studio", + "testing", + "toast-notifications", + { + "type": "category", + "label": "TypeScript", + "link": { + "type": "generated-index", + "title": "TypeScript", + "slug": "typescript/index" + }, + "items": [ + { + "type": "doc", + "label": "Introduction", + "id": "typescript/introduction" + }, + { + "type": "doc", + "id": "typescript/generated-types" + }, + { + "type": "doc", + "label": "Utility Types", + "id": "typescript/utility-types" + }, + { + "type": "doc", + "label": "Strict Mode", + "id": "typescript/strict-mode" + } + ] + }, + "webhooks", + "vite-configuration" + ] + }, + { + "type": "category", + "label": "How To", + "link": { + "type": "generated-index", + "title": "How To", + "slug": "/how-to/index" + }, + "items": [ + { + "type": "autogenerated", + "dirName": "how-to" + } + ] + } + ] +} diff --git a/docs/versions.json b/docs/versions.json index 369cac7b1a7a..0b2ac7975068 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,5 +1,6 @@ [ - "6.0", + "7.0", + "6.x", "5.x", "4.x", "3.x", diff --git a/lerna.json b/lerna.json index de05c81be98b..764e550e83ac 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "6.0.7", + "version": "7.0.0", "npmClient": "yarn", "command": { "publish": { diff --git a/package.json b/package.json index 64b5638c701e..11a77a515664 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 
@@ "@babel/runtime-corejs3": "7.23.9", "@faker-js/faker": "8.0.2", "@npmcli/arborist": "7.2.2", - "@playwright/test": "1.41.1", + "@playwright/test": "1.41.2", "@testing-library/jest-dom": "6.3.0", "@testing-library/react": "14.1.2", "@testing-library/user-event": "14.5.2", @@ -107,7 +107,7 @@ "ora": "7.0.1", "prompts": "2.4.2", "rimraf": "5.0.5", - "tstyche": "1.0.0-beta.9", + "tstyche": "1.0.0", "tsx": "4.6.2", "typescript": "5.3.3", "vitest": "1.2.2", diff --git a/packages/adapters/fastify/web/package.json b/packages/adapters/fastify/web/package.json index ee37284118ec..b59769112fd5 100644 --- a/packages/adapters/fastify/web/package.json +++ b/packages/adapters/fastify/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/fastify-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,11 +24,11 @@ "@fastify/http-proxy": "9.3.0", "@fastify/static": "6.12.0", "@fastify/url-data": "5.4.0", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/project-config": "7.0.0", "fast-glob": "3.3.2" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "fastify": "4.25.2", "tsx": "4.6.2", "typescript": "5.3.3", diff --git a/packages/api-server/package.json b/packages/api-server/package.json index d2ec5fc63966..4b7f7d902673 100644 --- a/packages/api-server/package.json +++ b/packages/api-server/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/api-server", - "version": "6.0.7", + "version": "7.0.0", "description": "Redwood's HTTP server for Serverless Functions", "repository": { "type": "git", @@ -30,10 +30,10 @@ }, "dependencies": { "@fastify/url-data": "5.4.0", - "@redwoodjs/context": "6.0.7", - "@redwoodjs/fastify-web": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/web-server": "6.0.7", + "@redwoodjs/context": "7.0.0", + "@redwoodjs/fastify-web": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/web-server": "7.0.0", "chalk": "4.1.2", "chokidar": "3.5.3", "dotenv-defaults": "5.0.2", @@ -49,7 +49,7 @@ "yargs": "17.7.2" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/aws-lambda": "8.10.126", "@types/lodash": "4.14.201", "@types/qs": "6.9.11", @@ -62,7 +62,7 @@ "vitest": "1.2.2" }, "peerDependencies": { - "@redwoodjs/graphql-server": "6.0.7" + "@redwoodjs/graphql-server": "7.0.0" }, "peerDependenciesMeta": { "@redwoodjs/graphql-server": { diff --git a/packages/api-server/src/__tests__/createServer.test.ts b/packages/api-server/src/__tests__/createServer.test.ts index b70b6b6921a7..d58ae2eb6d3c 100644 --- a/packages/api-server/src/__tests__/createServer.test.ts +++ b/packages/api-server/src/__tests__/createServer.test.ts @@ -239,8 +239,8 @@ describe('resolveOptions', () => { DEFAULT_CREATE_SERVER_OPTIONS.fastifyServerOptions.requestTimeout, logger: DEFAULT_CREATE_SERVER_OPTIONS.logger, }, - port: 65501, - host: '::', + apiPort: 65501, + apiHost: '::', }) }) @@ -309,17 +309,17 @@ describe('resolveOptions', () => { }) }) - it('parses `--port`', () => { + it('parses `--apiPort`', () => { expect( - resolveOptions({ parseArgs: true }, ['--port', '8930']).port + resolveOptions({ parseArgs: true }, ['--apiPort', '8930']).apiPort ).toEqual(8930) }) - it("throws if `--port` can't be converted to an integer", () => { + it("throws if `--apiPort` can't be converted to an integer", () => { expect(() => { - resolveOptions({ parseArgs: true }, ['--port', 'eight-nine-ten']) + 
resolveOptions({ parseArgs: true }, ['--apiPort', 'eight-nine-ten']) }).toThrowErrorMatchingInlineSnapshot( - `[Error: \`port\` must be an integer]` + `[Error: \`apiPort\` must be an integer]` ) }) @@ -338,9 +338,9 @@ describe('resolveOptions', () => { ).toEqual('/bar/') }) - it('parses `--host`', () => { + it('parses `--apiHost`', () => { expect( - resolveOptions({ parseArgs: true }, ['--host', '127.0.0.1']).host + resolveOptions({ parseArgs: true }, ['--apiHost', '127.0.0.1']).apiHost ).toEqual('127.0.0.1') }) }) diff --git a/packages/api-server/src/apiCLIConfigHandler.ts b/packages/api-server/src/apiCLIConfigHandler.ts index 8b586ab97db3..28a101445824 100644 --- a/packages/api-server/src/apiCLIConfigHandler.ts +++ b/packages/api-server/src/apiCLIConfigHandler.ts @@ -1,5 +1,7 @@ import chalk from 'chalk' +import { coerceRootPath } from '@redwoodjs/fastify-web' + import { getAPIPort, getAPIHost } from './cliHelpers' import createFastifyInstance from './fastify' import { redwoodFastifyAPI } from './plugins/api' @@ -9,6 +11,8 @@ export async function handler(options: APIParsedOptions) { const timeStart = Date.now() console.log(chalk.dim.italic('Starting API Server...')) + options.apiRootPath = coerceRootPath(options.apiRootPath ?? '/') + const fastify = await createFastifyInstance() fastify.register(redwoodFastifyAPI, { redwood: { diff --git a/packages/api-server/src/createServer.ts b/packages/api-server/src/createServer.ts index 286aed7d6d81..925750bae893 100644 --- a/packages/api-server/src/createServer.ts +++ b/packages/api-server/src/createServer.ts @@ -64,7 +64,7 @@ if (!process.env.REDWOOD_ENV_FILES_LOADED) { * ``` */ export async function createServer(options: CreateServerOptions = {}) { - const { apiRootPath, fastifyServerOptions, port, host } = + const { apiRootPath, fastifyServerOptions, apiPort, apiHost } = resolveOptions(options) // Warn about `api/server.config.js` @@ -154,18 +154,18 @@ export async function createServer(options: CreateServerOptions = {}) { }) /** - * A wrapper around `fastify.listen` that handles `--port`, `REDWOOD_API_PORT` and [api].port in redwood.toml + * A wrapper around `fastify.listen` that handles `--apiPort`, `REDWOOD_API_PORT` and [api].port in redwood.toml (same for host) * * The order of precedence is: - * - `--port` + * - `--apiPort` * - `REDWOOD_API_PORT` * - [api].port in redwood.toml */ server.start = (options: StartOptions = {}) => { return server.listen({ ...options, - port, - host, + port: apiPort, + host: apiHost, }) } diff --git a/packages/api-server/src/createServerHelpers.ts b/packages/api-server/src/createServerHelpers.ts index a295f63c7495..fa6e2602f972 100644 --- a/packages/api-server/src/createServerHelpers.ts +++ b/packages/api-server/src/createServerHelpers.ts @@ -51,8 +51,8 @@ export const DEFAULT_CREATE_SERVER_OPTIONS: DefaultCreateServerOptions = { type ResolvedOptions = Required< Omit<CreateServerOptions, 'logger' | 'fastifyServerOptions' | 'parseArgs'> & { fastifyServerOptions: FastifyServerOptions - port: number - host: string + apiPort: number + apiHost: string } > @@ -60,6 +60,8 @@ export function resolveOptions( options: CreateServerOptions = {}, args?: string[] ) { + options.parseArgs ??= true + options.logger ??= DEFAULT_CREATE_SERVER_OPTIONS.logger // Set defaults. @@ -73,8 +75,8 @@ export function resolveOptions( logger: options.logger ?? DEFAULT_CREATE_SERVER_OPTIONS.logger, }, - host: getAPIHost(), - port: getAPIPort(), + apiHost: getAPIHost(), + apiPort: getAPIPort(), } // Merge fastifyServerOptions. 
@@ -85,10 +87,10 @@ export function resolveOptions( if (options.parseArgs) { const { values } = parseArgs({ options: { - host: { + apiHost: { type: 'string', }, - port: { + apiPort: { type: 'string', short: 'p', }, @@ -96,21 +98,22 @@ export function resolveOptions( type: 'string', }, }, + strict: false, ...(args && { args }), }) - if (values.host && typeof values.host !== 'string') { - throw new Error('`host` must be a string') + if (values.apiHost && typeof values.apiHost !== 'string') { + throw new Error('`apiHost` must be a string') } - if (values.host) { - resolvedOptions.host = values.host + if (values.apiHost) { + resolvedOptions.apiHost = values.apiHost } - if (values.port) { - resolvedOptions.port = +values.port + if (values.apiPort) { + resolvedOptions.apiPort = +values.apiPort - if (isNaN(resolvedOptions.port)) { - throw new Error('`port` must be an integer') + if (isNaN(resolvedOptions.apiPort)) { + throw new Error('`apiPort` must be an integer') } } diff --git a/packages/api-server/src/watch.ts b/packages/api-server/src/watch.ts index d1d52046b346..bad8c36b43e9 100644 --- a/packages/api-server/src/watch.ts +++ b/packages/api-server/src/watch.ts @@ -138,7 +138,7 @@ const buildAndRestart = async ({ if (serverFile) { httpServerProcess = fork( serverFile, - ['--port', port.toString()], + ['--apiPort', port.toString()], forkOpts ) } else { diff --git a/packages/api/package.json b/packages/api/package.json index abe720f694ea..6ce88caddf16 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", diff --git a/packages/auth-providers/auth0/api/package.json b/packages/auth-providers/auth0/api/package.json index 9380412fd0b5..b8df03066329 100644 --- a/packages/auth-providers/auth0/api/package.json +++ b/packages/auth-providers/auth0/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-auth0-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -31,7 +31,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@types/jsonwebtoken": "9.0.5", "typescript": "5.3.3", "vitest": "1.2.2" diff --git a/packages/auth-providers/auth0/setup/package.json b/packages/auth-providers/auth0/setup/package.json index da9767d3176b..b1af0773d41b 100644 --- a/packages/auth-providers/auth0/setup/package.json +++ b/packages/auth-providers/auth0/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-auth0-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/auth0/web/package.json b/packages/auth-providers/auth0/web/package.json index cba077b05d8f..f4e234f923be 100644 --- a/packages/auth-providers/auth0/web/package.json +++ b/packages/auth-providers/auth0/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-auth0-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + 
"@redwoodjs/auth": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/azureActiveDirectory/api/package.json b/packages/auth-providers/azureActiveDirectory/api/package.json index 083ee460df2f..8417c15bd114 100644 --- a/packages/auth-providers/azureActiveDirectory/api/package.json +++ b/packages/auth-providers/azureActiveDirectory/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-azure-active-directory-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -31,7 +31,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@types/aws-lambda": "8.10.126", "@types/jsonwebtoken": "9.0.5", "typescript": "5.3.3", diff --git a/packages/auth-providers/azureActiveDirectory/setup/package.json b/packages/auth-providers/azureActiveDirectory/setup/package.json index 73a96b750efa..f069cf493109 100644 --- a/packages/auth-providers/azureActiveDirectory/setup/package.json +++ b/packages/auth-providers/azureActiveDirectory/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-azure-active-directory-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/azureActiveDirectory/web/package.json b/packages/auth-providers/azureActiveDirectory/web/package.json index 0b309a0357d8..7c73ea9a3a8d 100644 --- a/packages/auth-providers/azureActiveDirectory/web/package.json +++ b/packages/auth-providers/azureActiveDirectory/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-azure-active-directory-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/clerk/api/package.json b/packages/auth-providers/clerk/api/package.json index 255b5a54f6c7..c49ce7a9274e 100644 --- a/packages/auth-providers/clerk/api/package.json +++ b/packages/auth-providers/clerk/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-clerk-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -30,7 +30,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@types/aws-lambda": "8.10.126", "typescript": "5.3.3", "vitest": "1.2.2" diff --git a/packages/auth-providers/clerk/setup/package.json b/packages/auth-providers/clerk/setup/package.json index c40024ccac78..d3a746e51484 100644 --- a/packages/auth-providers/clerk/setup/package.json +++ b/packages/auth-providers/clerk/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-clerk-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -22,7 +22,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git 
a/packages/auth-providers/clerk/web/package.json b/packages/auth-providers/clerk/web/package.json index 16d862558610..af941e55c0cf 100644 --- a/packages/auth-providers/clerk/web/package.json +++ b/packages/auth-providers/clerk/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-clerk-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/custom/setup/package.json b/packages/auth-providers/custom/setup/package.json index 5ae5e8233715..45acd352e1b3 100644 --- a/packages/auth-providers/custom/setup/package.json +++ b/packages/auth-providers/custom/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-custom-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/dbAuth/api/package.json b/packages/auth-providers/dbAuth/api/package.json index a0ca9520ce8f..491334ed712f 100644 --- a/packages/auth-providers/dbAuth/api/package.json +++ b/packages/auth-providers/dbAuth/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-dbauth-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/project-config": "7.0.0", "base64url": "3.0.1", "core-js": "3.35.1", "md5": "2.3.0", @@ -33,7 +33,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@simplewebauthn/server": "7.4.0", "@types/md5": "2.3.5", "@types/uuid": "9.0.7", diff --git a/packages/auth-providers/dbAuth/setup/package.json b/packages/auth-providers/dbAuth/setup/package.json index b2b7ac41d723..9e95813b4fdc 100644 --- a/packages/auth-providers/dbAuth/setup/package.json +++ b/packages/auth-providers/dbAuth/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-dbauth-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -22,7 +22,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "@simplewebauthn/browser": "7.4.0", "core-js": "3.35.1", "prompts": "2.4.2", diff --git a/packages/auth-providers/dbAuth/web/package.json b/packages/auth-providers/dbAuth/web/package.json index 9f9611fade96..46e310dcc2b2 100644 --- a/packages/auth-providers/dbAuth/web/package.json +++ b/packages/auth-providers/dbAuth/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-dbauth-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -25,7 +25,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "@simplewebauthn/browser": "7.4.0", "core-js": "3.35.1" }, diff --git a/packages/auth-providers/firebase/api/package.json 
b/packages/auth-providers/firebase/api/package.json index 276e643feb42..524f2d02aca8 100644 --- a/packages/auth-providers/firebase/api/package.json +++ b/packages/auth-providers/firebase/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-firebase-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -30,7 +30,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@types/aws-lambda": "8.10.126", "typescript": "5.3.3", "vitest": "1.2.2" diff --git a/packages/auth-providers/firebase/setup/package.json b/packages/auth-providers/firebase/setup/package.json index 62ffdb2927db..983888702503 100644 --- a/packages/auth-providers/firebase/setup/package.json +++ b/packages/auth-providers/firebase/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-firebase-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/firebase/web/package.json b/packages/auth-providers/firebase/web/package.json index 6060ff77e3ba..610015333edd 100644 --- a/packages/auth-providers/firebase/web/package.json +++ b/packages/auth-providers/firebase/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-firebase-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/netlify/api/package.json b/packages/auth-providers/netlify/api/package.json index f80cfa916c32..65307c7b004f 100644 --- a/packages/auth-providers/netlify/api/package.json +++ b/packages/auth-providers/netlify/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-netlify-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -30,7 +30,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@types/aws-lambda": "8.10.126", "@types/jsonwebtoken": "9.0.5", "typescript": "5.3.3", diff --git a/packages/auth-providers/netlify/setup/package.json b/packages/auth-providers/netlify/setup/package.json index f4f0b325e500..b28db05b4990 100644 --- a/packages/auth-providers/netlify/setup/package.json +++ b/packages/auth-providers/netlify/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-netlify-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/netlify/web/package.json b/packages/auth-providers/netlify/web/package.json index cc0f78dbf5ce..e911e41ccb95 100644 --- a/packages/auth-providers/netlify/web/package.json +++ b/packages/auth-providers/netlify/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-netlify-web", - 
"version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/supabase/api/package.json b/packages/auth-providers/supabase/api/package.json index c4cb4b4239b7..69cd4622cc7f 100644 --- a/packages/auth-providers/supabase/api/package.json +++ b/packages/auth-providers/supabase/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-supabase-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -30,7 +30,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@types/aws-lambda": "8.10.126", "@types/jsonwebtoken": "9.0.5", "typescript": "5.3.3", diff --git a/packages/auth-providers/supabase/setup/package.json b/packages/auth-providers/supabase/setup/package.json index 0db831b27abb..becedbc1f646 100644 --- a/packages/auth-providers/supabase/setup/package.json +++ b/packages/auth-providers/supabase/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-supabase-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -22,7 +22,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/supabase/web/package.json b/packages/auth-providers/supabase/web/package.json index 896f85535823..5bd16a5acae8 100644 --- a/packages/auth-providers/supabase/web/package.json +++ b/packages/auth-providers/supabase/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-supabase-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", diff --git a/packages/auth-providers/supertokens/api/package.json b/packages/auth-providers/supertokens/api/package.json index 4773ed13f360..5f6d9cf7c1f8 100644 --- a/packages/auth-providers/supertokens/api/package.json +++ b/packages/auth-providers/supertokens/api/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-supertokens-api", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -31,7 +31,7 @@ "devDependencies": { "@babel/cli": "7.23.9", "@babel/core": "^7.22.20", - "@redwoodjs/api": "6.0.7", + "@redwoodjs/api": "7.0.0", "@types/jsonwebtoken": "9.0.5", "typescript": "5.3.3", "vitest": "1.2.2" diff --git a/packages/auth-providers/supertokens/setup/package.json b/packages/auth-providers/supertokens/setup/package.json index 029f5aeeca47..f909283a0180 100644 --- a/packages/auth-providers/supertokens/setup/package.json +++ b/packages/auth-providers/supertokens/setup/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-supertokens-setup", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/cli-helpers": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth-providers/supertokens/web/package.json 
b/packages/auth-providers/supertokens/web/package.json index 5ad326dd6e2c..57a7e59bad7c 100644 --- a/packages/auth-providers/supertokens/web/package.json +++ b/packages/auth-providers/supertokens/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth-supertokens-web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -24,7 +24,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/auth/package.json b/packages/auth/package.json index 6a7daf42f674..ef2fa29c5233 100644 --- a/packages/auth/package.json +++ b/packages/auth/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/auth", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", diff --git a/packages/babel-config/package.json b/packages/babel-config/package.json index f0626e41446a..5fe88b09e8c7 100644 --- a/packages/babel-config/package.json +++ b/packages/babel-config/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/babel-config", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -35,7 +35,7 @@ "@babel/register": "^7.22.15", "@babel/runtime-corejs3": "7.23.9", "@babel/traverse": "^7.22.20", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/project-config": "7.0.0", "babel-plugin-auto-import": "1.1.0", "babel-plugin-graphql-tag": "3.3.0", "babel-plugin-module-resolver": "5.0.0", @@ -45,7 +45,7 @@ "typescript": "5.3.3" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/babel-plugin-tester": "9.0.9", "@types/babel__core": "7.20.4", "@types/node": "20.11.10", diff --git a/packages/cli-helpers/package.json b/packages/cli-helpers/package.json index 3c4e6590b4d7..1dfabf1ef79f 100644 --- a/packages/cli-helpers/package.json +++ b/packages/cli-helpers/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/cli-helpers", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -30,8 +30,8 @@ "@babel/core": "^7.22.20", "@iarna/toml": "2.2.5", "@opentelemetry/api": "1.7.0", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/telemetry": "6.0.7", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/telemetry": "7.0.0", "chalk": "4.1.2", "dotenv": "16.3.1", "execa": "5.1.1", diff --git a/packages/cli-packages/dataMigrate/package.json b/packages/cli-packages/dataMigrate/package.json index 08162fd5e552..8ff1c11e774d 100644 --- a/packages/cli-packages/dataMigrate/package.json +++ b/packages/cli-packages/dataMigrate/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/cli-data-migrate", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -25,8 +25,8 @@ "test:unit": "yarn jest src" }, "dependencies": { - "@redwoodjs/babel-config": "6.0.7", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/babel-config": "7.0.0", + "@redwoodjs/project-config": "7.0.0", "chalk": "4.1.2", "dotenv-defaults": "5.0.2", "execa": "5.1.1", @@ -37,7 +37,7 @@ }, "devDependencies": { "@prisma/client": "5.9.1", - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/fs-extra": "11.0.4", "@types/yargs": "17.0.32", "jest": "29.7.0", diff --git 
a/packages/cli-packages/storybook/package.json b/packages/cli-packages/storybook/package.json index e3b7d86ca92c..64bd6e445234 100644 --- a/packages/cli-packages/storybook/package.json +++ b/packages/cli-packages/storybook/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/cli-storybook", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -20,9 +20,9 @@ "prepublishOnly": "NODE_ENV=production yarn build" }, "dependencies": { - "@redwoodjs/cli-helpers": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/telemetry": "6.0.7", + "@redwoodjs/cli-helpers": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/telemetry": "7.0.0", "@storybook/addon-a11y": "7.6.10", "@storybook/addon-docs": "7.6.10", "@storybook/addon-essentials": "7.6.10", @@ -34,7 +34,7 @@ "yargs": "17.7.2" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/yargs": "17.0.32", "tsx": "4.6.2", "typescript": "5.3.3" diff --git a/packages/cli/package.json b/packages/cli/package.json index 1bcdec46ac46..a59769f9ac23 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/cli", - "version": "6.0.7", + "version": "7.0.0", "description": "The Redwood Command Line", "repository": { "type": "git", @@ -38,15 +38,15 @@ "@opentelemetry/sdk-trace-node": "1.18.1", "@opentelemetry/semantic-conventions": "1.18.1", "@prisma/internals": "5.9.1", - "@redwoodjs/api-server": "6.0.7", - "@redwoodjs/cli-helpers": "6.0.7", - "@redwoodjs/fastify-web": "6.0.7", - "@redwoodjs/internal": "6.0.7", - "@redwoodjs/prerender": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/structure": "6.0.7", - "@redwoodjs/telemetry": "6.0.7", - "@redwoodjs/web-server": "6.0.7", + "@redwoodjs/api-server": "7.0.0", + "@redwoodjs/cli-helpers": "7.0.0", + "@redwoodjs/fastify-web": "7.0.0", + "@redwoodjs/internal": "7.0.0", + "@redwoodjs/prerender": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/structure": "7.0.0", + "@redwoodjs/telemetry": "7.0.0", + "@redwoodjs/web-server": "7.0.0", "archiver": "6.0.1", "boxen": "5.1.2", "camelcase": "6.3.0", diff --git a/packages/cli/src/commands/serve.js b/packages/cli/src/commands/serve.js index 5c9071bf7691..1696b66865cd 100644 --- a/packages/cli/src/commands/serve.js +++ b/packages/cli/src/commands/serve.js @@ -27,7 +27,7 @@ export const builder = async (yargs) => { .command({ command: '$0', description: bothServerCLIConfig.description, - builder: bothServerCLIConfig.builder(yargs), + builder: bothServerCLIConfig.builder, handler: async (argv) => { recordTelemetryAttributes({ command: 'serve', diff --git a/packages/cli/src/commands/serveApiHandler.js b/packages/cli/src/commands/serveApiHandler.js index 19cd3e03b37f..0c2bc6510c15 100644 --- a/packages/cli/src/commands/serveApiHandler.js +++ b/packages/cli/src/commands/serveApiHandler.js @@ -3,19 +3,14 @@ import execa from 'execa' import { getPaths } from '@redwoodjs/project-config' export const apiServerFileHandler = async (argv) => { - await execa( - 'yarn', - [ - 'node', - 'server.js', - '--port', - argv.port, - '--apiRootPath', - argv.apiRootPath, - ], - { - cwd: getPaths().api.dist, - stdio: 'inherit', - } - ) + const args = ['node', 'server.js', '--apiRootPath', argv.apiRootPath] + + if (argv.port) { + args.push('--apiPort', argv.port) + } + + await execa('yarn', args, { + cwd: getPaths().api.dist, + stdio: 'inherit', + }) } diff --git 
a/packages/cli/src/commands/serveBothHandler.js b/packages/cli/src/commands/serveBothHandler.js index 2f3e32f78956..8fdc57c7e88d 100644 --- a/packages/cli/src/commands/serveBothHandler.js +++ b/packages/cli/src/commands/serveBothHandler.js @@ -45,9 +45,9 @@ export const bothServerFileHandler = async (argv) => { [ { name: 'api', - command: `yarn node ${path.join('dist', 'server.js')} --port ${ + command: `yarn node ${path.join('dist', 'server.js')} --apiPort ${ argv.apiPort - } --host ${argv.apiHost} --api-root-path ${argv.apiRootPath}`, + } --apiHost ${argv.apiHost} --apiRootPath ${argv.apiRootPath}`, cwd: getPaths().api.base, prefixColor: 'cyan', }, diff --git a/packages/codemods/package.json b/packages/codemods/package.json index 1181757224c9..e07f3f6219ad 100644 --- a/packages/codemods/package.json +++ b/packages/codemods/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/codemods", - "version": "6.0.7", + "version": "7.0.0", "description": "Codemods to ease upgrading a RedwoodJS Project", "repository": { "type": "git", @@ -30,7 +30,7 @@ "@babel/runtime-corejs3": "7.23.9", "@babel/traverse": "^7.22.20", "@iarna/toml": "2.2.5", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/project-config": "7.0.0", "@svgr/core": "8.0.0", "@svgr/plugin-jsx": "8.0.1", "@vscode/ripgrep": "1.15.6", diff --git a/packages/context/package.json b/packages/context/package.json index a95c24247f40..ce380316f915 100644 --- a/packages/context/package.json +++ b/packages/context/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/context", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -20,7 +20,7 @@ "prepublishOnly": "NODE_ENV=production yarn build" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "tsx": "4.6.2", "typescript": "5.3.3" }, diff --git a/packages/core/package.json b/packages/core/package.json index ee39dbeecf9f..bc3081217f05 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/core", - "version": "6.0.7", + "version": "7.0.0", "description": "Foundational packages and config required to build RedwoodJS.", "repository": { "type": "git", @@ -41,12 +41,12 @@ "@babel/cli": "7.23.9", "@babel/runtime-corejs3": "7.23.9", "@pmmmwh/react-refresh-webpack-plugin": "0.5.11", - "@redwoodjs/cli": "6.0.7", - "@redwoodjs/eslint-config": "6.0.7", - "@redwoodjs/internal": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/testing": "6.0.7", - "@redwoodjs/web-server": "6.0.7", + "@redwoodjs/cli": "7.0.0", + "@redwoodjs/eslint-config": "7.0.0", + "@redwoodjs/internal": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/testing": "7.0.0", + "@redwoodjs/web-server": "7.0.0", "babel-loader": "^9.1.3", "babel-timing": "0.9.1", "copy-webpack-plugin": "11.0.0", diff --git a/packages/create-redwood-app/package.json b/packages/create-redwood-app/package.json index d5e25c5c5618..f904d0e4747b 100644 --- a/packages/create-redwood-app/package.json +++ b/packages/create-redwood-app/package.json @@ -1,6 +1,6 @@ { "name": "create-redwood-app", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -31,8 +31,8 @@ "@opentelemetry/resources": "1.18.1", "@opentelemetry/sdk-trace-node": "1.18.1", "@opentelemetry/semantic-conventions": "1.18.1", - "@redwoodjs/framework-tools": "6.0.7", - "@redwoodjs/tui": "6.0.7", + "@redwoodjs/framework-tools": 
"7.0.0", + "@redwoodjs/tui": "7.0.0", "@types/babel__core": "7.20.4", "chalk": "4.1.2", "check-node-version": "4.2.1", diff --git a/packages/create-redwood-app/templates/js/api/package.json b/packages/create-redwood-app/templates/js/api/package.json index 4ff4b4693ea9..aa20a71a09b1 100644 --- a/packages/create-redwood-app/templates/js/api/package.json +++ b/packages/create-redwood-app/templates/js/api/package.json @@ -3,7 +3,7 @@ "version": "0.0.0", "private": true, "dependencies": { - "@redwoodjs/api": "6.0.7", - "@redwoodjs/graphql-server": "6.0.7" + "@redwoodjs/api": "7.0.0", + "@redwoodjs/graphql-server": "7.0.0" } } diff --git a/packages/create-redwood-app/templates/js/package.json b/packages/create-redwood-app/templates/js/package.json index e74028e8ed71..eb82cb3cd099 100644 --- a/packages/create-redwood-app/templates/js/package.json +++ b/packages/create-redwood-app/templates/js/package.json @@ -7,8 +7,8 @@ ] }, "devDependencies": { - "@redwoodjs/core": "6.0.7", - "@redwoodjs/project-config": "6.0.7" + "@redwoodjs/core": "7.0.0", + "@redwoodjs/project-config": "7.0.0" }, "eslintConfig": { "extends": "@redwoodjs/eslint-config", diff --git a/packages/create-redwood-app/templates/js/web/package.json b/packages/create-redwood-app/templates/js/web/package.json index 489a42c622d1..b227f2923871 100644 --- a/packages/create-redwood-app/templates/js/web/package.json +++ b/packages/create-redwood-app/templates/js/web/package.json @@ -11,14 +11,14 @@ ] }, "dependencies": { - "@redwoodjs/forms": "6.0.7", - "@redwoodjs/router": "6.0.7", - "@redwoodjs/web": "6.0.7", + "@redwoodjs/forms": "7.0.0", + "@redwoodjs/router": "7.0.0", + "@redwoodjs/web": "7.0.0", "react": "0.0.0-experimental-e5205658f-20230913", "react-dom": "0.0.0-experimental-e5205658f-20230913" }, "devDependencies": { - "@redwoodjs/vite": "6.0.7", + "@redwoodjs/vite": "7.0.0", "@types/react": "^18.2.55", "@types/react-dom": "^18.2.19" } diff --git a/packages/create-redwood-app/templates/ts/api/package.json b/packages/create-redwood-app/templates/ts/api/package.json index 4ff4b4693ea9..aa20a71a09b1 100644 --- a/packages/create-redwood-app/templates/ts/api/package.json +++ b/packages/create-redwood-app/templates/ts/api/package.json @@ -3,7 +3,7 @@ "version": "0.0.0", "private": true, "dependencies": { - "@redwoodjs/api": "6.0.7", - "@redwoodjs/graphql-server": "6.0.7" + "@redwoodjs/api": "7.0.0", + "@redwoodjs/graphql-server": "7.0.0" } } diff --git a/packages/create-redwood-app/templates/ts/package.json b/packages/create-redwood-app/templates/ts/package.json index e74028e8ed71..eb82cb3cd099 100644 --- a/packages/create-redwood-app/templates/ts/package.json +++ b/packages/create-redwood-app/templates/ts/package.json @@ -7,8 +7,8 @@ ] }, "devDependencies": { - "@redwoodjs/core": "6.0.7", - "@redwoodjs/project-config": "6.0.7" + "@redwoodjs/core": "7.0.0", + "@redwoodjs/project-config": "7.0.0" }, "eslintConfig": { "extends": "@redwoodjs/eslint-config", diff --git a/packages/create-redwood-app/templates/ts/web/package.json b/packages/create-redwood-app/templates/ts/web/package.json index 489a42c622d1..b227f2923871 100644 --- a/packages/create-redwood-app/templates/ts/web/package.json +++ b/packages/create-redwood-app/templates/ts/web/package.json @@ -11,14 +11,14 @@ ] }, "dependencies": { - "@redwoodjs/forms": "6.0.7", - "@redwoodjs/router": "6.0.7", - "@redwoodjs/web": "6.0.7", + "@redwoodjs/forms": "7.0.0", + "@redwoodjs/router": "7.0.0", + "@redwoodjs/web": "7.0.0", "react": "0.0.0-experimental-e5205658f-20230913", "react-dom": 
"0.0.0-experimental-e5205658f-20230913" }, "devDependencies": { - "@redwoodjs/vite": "6.0.7", + "@redwoodjs/vite": "7.0.0", "@types/react": "^18.2.55", "@types/react-dom": "^18.2.19" } diff --git a/packages/create-redwood-app/tests/e2e.test.ts b/packages/create-redwood-app/tests/e2e.test.ts index 875d4f3ec834..c4f75af38cfb 100644 --- a/packages/create-redwood-app/tests/e2e.test.ts +++ b/packages/create-redwood-app/tests/e2e.test.ts @@ -47,7 +47,7 @@ describe('create-redwood-app', () => { expect(p.exitCode).toEqual(0) expect(p.stdout).toMatchInlineSnapshot(` - "6.0.7 + "7.0.0 [?25l[?25h" `) expect(p.stderr).toMatchInlineSnapshot(`"[?25l[?25h"`) diff --git a/packages/eslint-config/package.json b/packages/eslint-config/package.json index 2b78e8c3d40c..4a24c1f883d0 100644 --- a/packages/eslint-config/package.json +++ b/packages/eslint-config/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/eslint-config", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -16,9 +16,9 @@ "@babel/core": "^7.22.20", "@babel/eslint-parser": "7.23.10", "@babel/eslint-plugin": "7.23.5", - "@redwoodjs/eslint-plugin": "6.0.7", - "@redwoodjs/internal": "6.0.7", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/eslint-plugin": "7.0.0", + "@redwoodjs/internal": "7.0.0", + "@redwoodjs/project-config": "7.0.0", "@typescript-eslint/eslint-plugin": "5.62.0", "@typescript-eslint/parser": "5.62.0", "eslint": "8.55.0", diff --git a/packages/eslint-plugin/package.json b/packages/eslint-plugin/package.json index 5e535b62f50b..ac99ff3d0a87 100644 --- a/packages/eslint-plugin/package.json +++ b/packages/eslint-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/eslint-plugin", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -26,7 +26,7 @@ "eslint": "8.55.0" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/eslint": "8", "@types/estree": "1.0.5", "@typescript-eslint/parser": "5.62.0", diff --git a/packages/forms/package.json b/packages/forms/package.json index 8c0c5e5bcd25..b99843564828 100644 --- a/packages/forms/package.json +++ b/packages/forms/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/forms", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", diff --git a/packages/framework-tools/package.json b/packages/framework-tools/package.json index c3059a0db87e..5ff61c28ee43 100644 --- a/packages/framework-tools/package.json +++ b/packages/framework-tools/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/framework-tools", - "version": "6.0.7", + "version": "7.0.0", "private": true, "repository": { "type": "git", diff --git a/packages/graphql-server/package.json b/packages/graphql-server/package.json index c0ddc8d5a655..65ca97162d91 100644 --- a/packages/graphql-server/package.json +++ b/packages/graphql-server/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/graphql-server", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -35,8 +35,8 @@ "@graphql-tools/utils": "10.0.11", "@graphql-yoga/plugin-persisted-operations": "3.1.1", "@opentelemetry/api": "1.7.0", - "@redwoodjs/api": "6.0.7", - "@redwoodjs/context": "6.0.7", + "@redwoodjs/api": "7.0.0", + "@redwoodjs/context": "7.0.0", "core-js": "3.35.1", "graphql": "16.8.1", "graphql-scalars": 
"1.22.4", @@ -50,8 +50,8 @@ "@babel/core": "^7.22.20", "@envelop/testing": "7.0.0", "@envelop/types": "5.0.0", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/realtime": "6.0.7", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/realtime": "7.0.0", "@types/jsonwebtoken": "9.0.5", "@types/lodash": "4.14.201", "@types/uuid": "9.0.7", diff --git a/packages/internal/package.json b/packages/internal/package.json index 907f731b94ea..f5abd6de1600 100644 --- a/packages/internal/package.json +++ b/packages/internal/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/internal", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -46,10 +46,10 @@ "@graphql-codegen/typescript-react-apollo": "3.3.7", "@graphql-codegen/typescript-resolvers": "3.2.1", "@graphql-tools/documents": "1.0.0", - "@redwoodjs/babel-config": "6.0.7", - "@redwoodjs/graphql-server": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/router": "6.0.7", + "@redwoodjs/babel-config": "7.0.0", + "@redwoodjs/graphql-server": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/router": "7.0.0", "@sdl-codegen/node": "0.0.10", "chalk": "4.1.2", "core-js": "3.35.1", diff --git a/packages/mailer/core/package.json b/packages/mailer/core/package.json index 70af12d23d92..7711c5f1fc8d 100644 --- a/packages/mailer/core/package.json +++ b/packages/mailer/core/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/mailer-core", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -22,8 +22,8 @@ "test:watch": "vitest watch src" }, "devDependencies": { - "@redwoodjs/api": "6.0.7", - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/api": "7.0.0", + "@redwoodjs/framework-tools": "7.0.0", "tsx": "4.6.2", "typescript": "5.3.3", "vitest": "1.2.2" diff --git a/packages/mailer/handlers/in-memory/package.json b/packages/mailer/handlers/in-memory/package.json index 1d31fa7aa90a..411d4d017c93 100644 --- a/packages/mailer/handlers/in-memory/package.json +++ b/packages/mailer/handlers/in-memory/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/mailer-handler-in-memory", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -20,10 +20,10 @@ "prepublishOnly": "NODE_ENV=production yarn build" }, "dependencies": { - "@redwoodjs/mailer-core": "6.0.7" + "@redwoodjs/mailer-core": "7.0.0" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "tsx": "4.6.2", "typescript": "5.3.3" }, diff --git a/packages/mailer/handlers/nodemailer/package.json b/packages/mailer/handlers/nodemailer/package.json index 22ff7d15fd44..e3d567708e44 100644 --- a/packages/mailer/handlers/nodemailer/package.json +++ b/packages/mailer/handlers/nodemailer/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/mailer-handler-nodemailer", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -20,11 +20,11 @@ "prepublishOnly": "NODE_ENV=production yarn build" }, "dependencies": { - "@redwoodjs/mailer-core": "6.0.7", + "@redwoodjs/mailer-core": "7.0.0", "nodemailer": "6.9.9" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/nodemailer": "^6", "tsx": "4.6.2", "typescript": "5.3.3" diff --git a/packages/mailer/handlers/resend/package.json 
b/packages/mailer/handlers/resend/package.json index 62d6086b69da..ff2b8814ce76 100644 --- a/packages/mailer/handlers/resend/package.json +++ b/packages/mailer/handlers/resend/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/mailer-handler-resend", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -20,11 +20,11 @@ "prepublishOnly": "NODE_ENV=production yarn build" }, "dependencies": { - "@redwoodjs/mailer-core": "6.0.7", + "@redwoodjs/mailer-core": "7.0.0", "resend": "1.1.0" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "tsx": "4.6.2", "typescript": "5.3.3" }, diff --git a/packages/mailer/handlers/studio/package.json b/packages/mailer/handlers/studio/package.json index e296e3310b12..c4a907711e23 100644 --- a/packages/mailer/handlers/studio/package.json +++ b/packages/mailer/handlers/studio/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/mailer-handler-studio", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -20,11 +20,11 @@ "prepublishOnly": "NODE_ENV=production yarn build" }, "dependencies": { - "@redwoodjs/mailer-core": "6.0.7", - "@redwoodjs/mailer-handler-nodemailer": "6.0.7" + "@redwoodjs/mailer-core": "7.0.0", + "@redwoodjs/mailer-handler-nodemailer": "7.0.0" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/nodemailer": "^6", "tsx": "4.6.2", "typescript": "5.3.3" diff --git a/packages/mailer/renderers/mjml-react/package.json b/packages/mailer/renderers/mjml-react/package.json index 5941aee40f8b..cb228651e3ad 100644 --- a/packages/mailer/renderers/mjml-react/package.json +++ b/packages/mailer/renderers/mjml-react/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/mailer-renderer-mjml-react", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -21,11 +21,11 @@ }, "dependencies": { "@faire/mjml-react": "3.3.0", - "@redwoodjs/mailer-core": "6.0.7", + "@redwoodjs/mailer-core": "7.0.0", "mjml": "4.14.1" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "@types/mjml": "4", "tsx": "4.6.2", "typescript": "5.3.3" diff --git a/packages/mailer/renderers/react-email/package.json b/packages/mailer/renderers/react-email/package.json index 788d29ec4cf8..f62a69639fcc 100644 --- a/packages/mailer/renderers/react-email/package.json +++ b/packages/mailer/renderers/react-email/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/mailer-renderer-react-email", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -21,10 +21,10 @@ }, "dependencies": { "@react-email/render": "0.0.10", - "@redwoodjs/mailer-core": "6.0.7" + "@redwoodjs/mailer-core": "7.0.0" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "tsx": "4.6.2", "typescript": "5.3.3" }, diff --git a/packages/prerender/package.json b/packages/prerender/package.json index 179af277c053..32328e8ff24e 100644 --- a/packages/prerender/package.json +++ b/packages/prerender/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/prerender", - "version": "6.0.7", + "version": "7.0.0", "description": "RedwoodJS prerender", "repository": { "type": "git", @@ -26,12 +26,12 @@ }, "dependencies": { "@babel/runtime-corejs3": 
"7.23.9", - "@redwoodjs/auth": "6.0.7", - "@redwoodjs/internal": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/router": "6.0.7", - "@redwoodjs/structure": "6.0.7", - "@redwoodjs/web": "6.0.7", + "@redwoodjs/auth": "7.0.0", + "@redwoodjs/internal": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/router": "7.0.0", + "@redwoodjs/structure": "7.0.0", + "@redwoodjs/web": "7.0.0", "@whatwg-node/fetch": "0.9.16", "babel-plugin-ignore-html-and-css-imports": "0.1.0", "cheerio": "1.0.0-rc.12", diff --git a/packages/project-config/package.json b/packages/project-config/package.json index 0dfa54e7ef6a..924bdd101213 100644 --- a/packages/project-config/package.json +++ b/packages/project-config/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/project-config", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -33,7 +33,7 @@ "string-env-interpolation": "1.0.1" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "rimraf": "5.0.5", "tsx": "4.6.2", "typescript": "5.3.3", diff --git a/packages/realtime/package.json b/packages/realtime/package.json index 9db107e91943..ffba0789aa45 100644 --- a/packages/realtime/package.json +++ b/packages/realtime/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/realtime", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -38,7 +38,7 @@ "@envelop/core": "5.0.0", "@envelop/testing": "7.0.0", "@envelop/types": "5.0.0", - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "jest": "29.7.0", "nodemon": "3.0.2", "tsx": "4.6.2", diff --git a/packages/record/package.json b/packages/record/package.json index 62136baeab90..3200315982c9 100644 --- a/packages/record/package.json +++ b/packages/record/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/record", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -25,7 +25,7 @@ "dependencies": { "@babel/runtime-corejs3": "7.23.9", "@prisma/client": "5.9.1", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/project-config": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { diff --git a/packages/router/package.json b/packages/router/package.json index 3e51d959abd3..a95767f74fc5 100644 --- a/packages/router/package.json +++ b/packages/router/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/router", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -26,7 +26,7 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "core-js": "3.35.1" }, "devDependencies": { @@ -37,7 +37,7 @@ "jest": "29.7.0", "react": "0.0.0-experimental-e5205658f-20230913", "react-dom": "0.0.0-experimental-e5205658f-20230913", - "tstyche": "1.0.0-beta.9", + "tstyche": "1.0.0", "typescript": "5.3.3" }, "peerDependencies": { diff --git a/packages/structure/package.json b/packages/structure/package.json index 6ace9b2f456e..2d80baa51522 100644 --- a/packages/structure/package.json +++ b/packages/structure/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/structure", - "version": "6.0.7", + "version": "7.0.0", "description": "noun: the arrangement of and relations between the parts or elements of something complex", "repository": { "type": "git", @@ -28,7 +28,7 @@ 
"@babel/runtime-corejs3": "7.23.9", "@iarna/toml": "2.2.5", "@prisma/internals": "5.9.1", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/project-config": "7.0.0", "@types/line-column": "1.0.0", "camelcase": "6.3.0", "core-js": "3.35.1", diff --git a/packages/telemetry/package.json b/packages/telemetry/package.json index e715286e4983..2531c3ccbaba 100644 --- a/packages/telemetry/package.json +++ b/packages/telemetry/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/telemetry", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -23,8 +23,8 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/structure": "6.0.7", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/structure": "7.0.0", "@whatwg-node/fetch": "0.9.16", "ci-info": "4.0.0", "core-js": "3.35.1", diff --git a/packages/testing/package.json b/packages/testing/package.json index ef123e11a124..0b1b04bf77f4 100644 --- a/packages/testing/package.json +++ b/packages/testing/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/testing", - "version": "6.0.7", + "version": "7.0.0", "description": "Tools, wrappers and configuration for testing a Redwood project.", "repository": { "type": "git", @@ -29,13 +29,13 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", - "@redwoodjs/babel-config": "6.0.7", - "@redwoodjs/context": "6.0.7", - "@redwoodjs/graphql-server": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/router": "6.0.7", - "@redwoodjs/web": "6.0.7", + "@redwoodjs/auth": "7.0.0", + "@redwoodjs/babel-config": "7.0.0", + "@redwoodjs/context": "7.0.0", + "@redwoodjs/graphql-server": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/router": "7.0.0", + "@redwoodjs/web": "7.0.0", "@testing-library/jest-dom": "6.3.0", "@testing-library/react": "14.1.2", "@testing-library/user-event": "14.5.2", diff --git a/packages/tui/package.json b/packages/tui/package.json index 5b7c494d3656..ebb5e7dc710f 100644 --- a/packages/tui/package.json +++ b/packages/tui/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/tui", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -26,7 +26,7 @@ "stdout-update": "1.6.8" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "tsx": "4.6.2", "typescript": "5.3.3" }, diff --git a/packages/vite/package.json b/packages/vite/package.json index 6704f290655f..95a9e0678653 100644 --- a/packages/vite/package.json +++ b/packages/vite/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/vite", - "version": "6.0.7", + "version": "7.0.0", "description": "Vite configuration package for Redwood", "repository": { "type": "git", @@ -69,9 +69,9 @@ }, "dependencies": { "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/internal": "6.0.7", - "@redwoodjs/project-config": "6.0.7", - "@redwoodjs/web": "6.0.7", + "@redwoodjs/internal": "7.0.0", + "@redwoodjs/project-config": "7.0.0", + "@redwoodjs/web": "7.0.0", "@swc/core": "1.3.60", "@vitejs/plugin-react": "4.2.1", "@whatwg-node/fetch": "0.9.16", diff --git a/packages/web-server/package.json b/packages/web-server/package.json index c919a2fd64d8..9a6ddd736945 100644 --- a/packages/web-server/package.json +++ b/packages/web-server/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/web-server", - "version": "6.0.7", + "version": "7.0.0", "description": "Redwood's 
server for the Web side", "repository": { "type": "git", @@ -25,8 +25,8 @@ "prepublishOnly": "NODE_ENV=production yarn build" }, "dependencies": { - "@redwoodjs/fastify-web": "6.0.7", - "@redwoodjs/project-config": "6.0.7", + "@redwoodjs/fastify-web": "7.0.0", + "@redwoodjs/project-config": "7.0.0", "chalk": "4.1.2", "dotenv-defaults": "5.0.2", "fastify": "4.25.2", @@ -34,7 +34,7 @@ "yargs": "17.7.2" }, "devDependencies": { - "@redwoodjs/framework-tools": "6.0.7", + "@redwoodjs/framework-tools": "7.0.0", "tsx": "4.6.2", "typescript": "5.3.3" }, diff --git a/packages/web/package.json b/packages/web/package.json index d5ce02201de9..c8fa50fe33aa 100644 --- a/packages/web/package.json +++ b/packages/web/package.json @@ -1,6 +1,6 @@ { "name": "@redwoodjs/web", - "version": "6.0.7", + "version": "7.0.0", "repository": { "type": "git", "url": "https://github.com/redwoodjs/redwood.git", @@ -39,7 +39,7 @@ "dependencies": { "@apollo/client": "3.8.10", "@babel/runtime-corejs3": "7.23.9", - "@redwoodjs/auth": "6.0.7", + "@redwoodjs/auth": "7.0.0", "core-js": "3.35.1", "graphql": "16.8.1", "graphql-sse": "2.5.2", @@ -61,7 +61,7 @@ "nodemon": "3.0.2", "react": "0.0.0-experimental-e5205658f-20230913", "react-dom": "0.0.0-experimental-e5205658f-20230913", - "tstyche": "1.0.0-beta.9", + "tstyche": "1.0.0", "typescript": "5.3.3" }, "peerDependencies": { diff --git a/yarn.lock b/yarn.lock index 62243054566b..14067f4cd128 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6525,14 +6525,14 @@ __metadata: languageName: node linkType: hard -"@playwright/test@npm:1.41.1": - version: 1.41.1 - resolution: "@playwright/test@npm:1.41.1" +"@playwright/test@npm:1.41.2": + version: 1.41.2 + resolution: "@playwright/test@npm:1.41.2" dependencies: - playwright: "npm:1.41.1" + playwright: "npm:1.41.2" bin: playwright: cli.js - checksum: 10c0/72bd5bb67c512027d214b9c54c2a22a469bd19d7809771e53a5bfdcc11330591e01579bb22f807d1ebbcdcea35d625e0fc9eb9791cebcc63bf55b82dd1cdefdd + checksum: 10c0/071fe307e7e46f550e8608ce3c2c207b7cfbda37b39f3dcbe3875eaa18e79f2a768a5795a8cfe21df9361ec63594de0359f5542dd3a3a7f6625300a98452a344 languageName: node linkType: hard @@ -7393,16 +7393,16 @@ __metadata: languageName: node linkType: hard -"@redwoodjs/api-server@npm:6.0.7, @redwoodjs/api-server@workspace:packages/api-server": +"@redwoodjs/api-server@npm:7.0.0, @redwoodjs/api-server@workspace:packages/api-server": version: 0.0.0-use.local resolution: "@redwoodjs/api-server@workspace:packages/api-server" dependencies: "@fastify/url-data": "npm:5.4.0" - "@redwoodjs/context": "npm:6.0.7" - "@redwoodjs/fastify-web": "npm:6.0.7" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/web-server": "npm:6.0.7" + "@redwoodjs/context": "npm:7.0.0" + "@redwoodjs/fastify-web": "npm:7.0.0" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/web-server": "npm:7.0.0" "@types/aws-lambda": "npm:8.10.126" "@types/lodash": "npm:4.14.201" "@types/qs": "npm:6.9.11" @@ -7427,7 +7427,7 @@ __metadata: vitest: "npm:1.2.2" yargs: "npm:17.7.2" peerDependencies: - "@redwoodjs/graphql-server": 6.0.7 + "@redwoodjs/graphql-server": 7.0.0 peerDependenciesMeta: "@redwoodjs/graphql-server": optional: true @@ -7438,7 +7438,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/api@npm:6.0.7, @redwoodjs/api@workspace:packages/api": +"@redwoodjs/api@npm:7.0.0, @redwoodjs/api@workspace:packages/api": version: 0.0.0-use.local resolution: "@redwoodjs/api@workspace:packages/api" dependencies: 
@@ -7487,7 +7487,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/api": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" "@types/jsonwebtoken": "npm:9.0.5" core-js: "npm:3.35.1" jsonwebtoken: "npm:9.0.2" @@ -7504,7 +7504,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7520,7 +7520,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@types/react": "npm:^18.2.55" core-js: "npm:3.35.1" react: "npm:0.0.0-experimental-e5205658f-20230913" @@ -7538,7 +7538,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/api": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" "@types/aws-lambda": "npm:8.10.126" "@types/jsonwebtoken": "npm:9.0.5" core-js: "npm:3.35.1" @@ -7556,7 +7556,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7572,7 +7572,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@types/netlify-identity-widget": "npm:1.9.6" "@types/react": "npm:^18.2.55" core-js: "npm:3.35.1" @@ -7592,7 +7592,7 @@ __metadata: "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" "@clerk/clerk-sdk-node": "npm:4.13.7" - "@redwoodjs/api": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" "@types/aws-lambda": "npm:8.10.126" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7607,7 +7607,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7623,7 +7623,7 @@ __metadata: "@babel/runtime-corejs3": "npm:7.23.9" "@clerk/clerk-react": "npm:4.30.3" "@clerk/types": "npm:3.60.0" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@types/react": "npm:^18.2.55" core-js: "npm:3.35.1" react: "npm:0.0.0-experimental-e5205658f-20230913" @@ -7641,7 +7641,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7656,8 +7656,8 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/api": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" "@simplewebauthn/server": "npm:7.4.0" "@types/md5": "npm:2.3.5" "@types/uuid": "npm:9.0.7" @@ -7677,7 +7677,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@simplewebauthn/browser": "npm:7.4.0" "@simplewebauthn/typescript-types": 
"npm:7.4.0" "@types/yargs": "npm:17.0.32" @@ -7695,7 +7695,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@simplewebauthn/browser": "npm:7.4.0" "@simplewebauthn/typescript-types": "npm:7.4.0" "@types/react": "npm:^18.2.55" @@ -7713,7 +7713,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/api": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" "@types/aws-lambda": "npm:8.10.126" core-js: "npm:3.35.1" firebase-admin: "npm:11.11.1" @@ -7729,7 +7729,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7744,7 +7744,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@types/react": "npm:^18.2.55" core-js: "npm:3.35.1" firebase: "npm:10.7.0" @@ -7763,7 +7763,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/api": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" "@types/aws-lambda": "npm:8.10.126" "@types/jsonwebtoken": "npm:9.0.5" core-js: "npm:3.35.1" @@ -7780,7 +7780,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7795,7 +7795,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@types/netlify-identity-widget": "npm:1.9.6" "@types/react": "npm:^18.2.55" core-js: "npm:3.35.1" @@ -7814,7 +7814,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/api": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" "@types/aws-lambda": "npm:8.10.126" "@types/jsonwebtoken": "npm:9.0.5" core-js: "npm:3.35.1" @@ -7831,7 +7831,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" typescript: "npm:5.3.3" @@ -7863,7 +7863,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/api": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" "@types/jsonwebtoken": "npm:9.0.5" core-js: "npm:3.35.1" jsonwebtoken: "npm:9.0.2" @@ -7882,7 +7882,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/cli-helpers": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" "@types/yargs": "npm:17.0.32" core-js: "npm:3.35.1" memfs: "npm:4.6.0" @@ -7898,7 +7898,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@types/react": "npm:^18.2.55" core-js: "npm:3.35.1" react: "npm:0.0.0-experimental-e5205658f-20230913" @@ -7910,7 +7910,7 @@ __metadata: languageName: unknown 
linkType: soft -"@redwoodjs/auth@npm:6.0.7, @redwoodjs/auth@workspace:packages/auth": +"@redwoodjs/auth@npm:7.0.0, @redwoodjs/auth@workspace:packages/auth": version: 0.0.0-use.local resolution: "@redwoodjs/auth@workspace:packages/auth" dependencies: @@ -7927,7 +7927,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/babel-config@npm:6.0.7, @redwoodjs/babel-config@workspace:packages/babel-config": +"@redwoodjs/babel-config@npm:7.0.0, @redwoodjs/babel-config@workspace:packages/babel-config": version: 0.0.0-use.local resolution: "@redwoodjs/babel-config@workspace:packages/babel-config" dependencies: @@ -7944,8 +7944,8 @@ __metadata: "@babel/register": "npm:^7.22.15" "@babel/runtime-corejs3": "npm:7.23.9" "@babel/traverse": "npm:^7.22.20" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" "@types/babel-plugin-tester": "npm:9.0.9" "@types/babel__core": "npm:7.20.4" "@types/node": "npm:20.11.10" @@ -7967,9 +7967,9 @@ __metadata: resolution: "@redwoodjs/cli-data-migrate@workspace:packages/cli-packages/dataMigrate" dependencies: "@prisma/client": "npm:5.9.1" - "@redwoodjs/babel-config": "npm:6.0.7" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/babel-config": "npm:7.0.0" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" "@types/fs-extra": "npm:11.0.4" "@types/yargs": "npm:17.0.32" chalk: "npm:4.1.2" @@ -7988,15 +7988,15 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/cli-helpers@npm:6.0.7, @redwoodjs/cli-helpers@workspace:packages/cli-helpers": +"@redwoodjs/cli-helpers@npm:7.0.0, @redwoodjs/cli-helpers@workspace:packages/cli-helpers": version: 0.0.0-use.local resolution: "@redwoodjs/cli-helpers@workspace:packages/cli-helpers" dependencies: "@babel/core": "npm:^7.22.20" "@iarna/toml": "npm:2.2.5" "@opentelemetry/api": "npm:1.7.0" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/telemetry": "npm:6.0.7" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/telemetry": "npm:7.0.0" "@types/lodash": "npm:4.14.201" "@types/pascalcase": "npm:1.0.3" "@types/yargs": "npm:17.0.32" @@ -8019,10 +8019,10 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/cli-storybook@workspace:packages/cli-packages/storybook" dependencies: - "@redwoodjs/cli-helpers": "npm:6.0.7" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/telemetry": "npm:6.0.7" + "@redwoodjs/cli-helpers": "npm:7.0.0" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/telemetry": "npm:7.0.0" "@storybook/addon-a11y": "npm:7.6.10" "@storybook/addon-docs": "npm:7.6.10" "@storybook/addon-essentials": "npm:7.6.10" @@ -8038,7 +8038,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/cli@npm:6.0.7, @redwoodjs/cli@workspace:packages/cli": +"@redwoodjs/cli@npm:7.0.0, @redwoodjs/cli@workspace:packages/cli": version: 0.0.0-use.local resolution: "@redwoodjs/cli@workspace:packages/cli" dependencies: @@ -8053,15 +8053,15 @@ __metadata: "@opentelemetry/sdk-trace-node": "npm:1.18.1" "@opentelemetry/semantic-conventions": "npm:1.18.1" "@prisma/internals": "npm:5.9.1" - "@redwoodjs/api-server": "npm:6.0.7" - "@redwoodjs/cli-helpers": "npm:6.0.7" - "@redwoodjs/fastify-web": "npm:6.0.7" - "@redwoodjs/internal": "npm:6.0.7" - "@redwoodjs/prerender": "npm:6.0.7" - "@redwoodjs/project-config": 
"npm:6.0.7" - "@redwoodjs/structure": "npm:6.0.7" - "@redwoodjs/telemetry": "npm:6.0.7" - "@redwoodjs/web-server": "npm:6.0.7" + "@redwoodjs/api-server": "npm:7.0.0" + "@redwoodjs/cli-helpers": "npm:7.0.0" + "@redwoodjs/fastify-web": "npm:7.0.0" + "@redwoodjs/internal": "npm:7.0.0" + "@redwoodjs/prerender": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/structure": "npm:7.0.0" + "@redwoodjs/telemetry": "npm:7.0.0" + "@redwoodjs/web-server": "npm:7.0.0" "@types/archiver": "npm:^6" archiver: "npm:6.0.1" boxen: "npm:5.1.2" @@ -8120,7 +8120,7 @@ __metadata: "@babel/runtime-corejs3": "npm:7.23.9" "@babel/traverse": "npm:^7.22.20" "@iarna/toml": "npm:2.2.5" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/project-config": "npm:7.0.0" "@svgr/core": "npm:8.0.0" "@svgr/plugin-jsx": "npm:8.0.1" "@types/babel__core": "npm:7.20.4" @@ -8151,11 +8151,11 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/context@npm:6.0.7, @redwoodjs/context@workspace:packages/context": +"@redwoodjs/context@npm:7.0.0, @redwoodjs/context@workspace:packages/context": version: 0.0.0-use.local resolution: "@redwoodjs/context@workspace:packages/context" dependencies: - "@redwoodjs/framework-tools": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" tsx: "npm:4.6.2" typescript: "npm:5.3.3" languageName: unknown @@ -8168,12 +8168,12 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/runtime-corejs3": "npm:7.23.9" "@pmmmwh/react-refresh-webpack-plugin": "npm:0.5.11" - "@redwoodjs/cli": "npm:6.0.7" - "@redwoodjs/eslint-config": "npm:6.0.7" - "@redwoodjs/internal": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/testing": "npm:6.0.7" - "@redwoodjs/web-server": "npm:6.0.7" + "@redwoodjs/cli": "npm:7.0.0" + "@redwoodjs/eslint-config": "npm:7.0.0" + "@redwoodjs/internal": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/testing": "npm:7.0.0" + "@redwoodjs/web-server": "npm:7.0.0" "@types/lodash": "npm:4.14.201" babel-loader: "npm:^9.1.3" babel-timing: "npm:0.9.1" @@ -8221,7 +8221,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/eslint-config@npm:6.0.7, @redwoodjs/eslint-config@workspace:packages/eslint-config": +"@redwoodjs/eslint-config@npm:7.0.0, @redwoodjs/eslint-config@workspace:packages/eslint-config": version: 0.0.0-use.local resolution: "@redwoodjs/eslint-config@workspace:packages/eslint-config" dependencies: @@ -8229,9 +8229,9 @@ __metadata: "@babel/core": "npm:^7.22.20" "@babel/eslint-parser": "npm:7.23.10" "@babel/eslint-plugin": "npm:7.23.5" - "@redwoodjs/eslint-plugin": "npm:6.0.7" - "@redwoodjs/internal": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/eslint-plugin": "npm:7.0.0" + "@redwoodjs/internal": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" "@typescript-eslint/eslint-plugin": "npm:5.62.0" "@typescript-eslint/parser": "npm:5.62.0" eslint: "npm:8.55.0" @@ -8250,11 +8250,11 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/eslint-plugin@npm:6.0.7, @redwoodjs/eslint-plugin@workspace:packages/eslint-plugin": +"@redwoodjs/eslint-plugin@npm:7.0.0, @redwoodjs/eslint-plugin@workspace:packages/eslint-plugin": version: 0.0.0-use.local resolution: "@redwoodjs/eslint-plugin@workspace:packages/eslint-plugin" dependencies: - "@redwoodjs/framework-tools": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" "@types/eslint": "npm:8" "@types/estree": "npm:1.0.5" "@typescript-eslint/parser": "npm:5.62.0" @@ -8266,15 +8266,15 @@ __metadata: languageName: unknown linkType: soft 
-"@redwoodjs/fastify-web@npm:6.0.7, @redwoodjs/fastify-web@workspace:packages/adapters/fastify/web": +"@redwoodjs/fastify-web@npm:7.0.0, @redwoodjs/fastify-web@workspace:packages/adapters/fastify/web": version: 0.0.0-use.local resolution: "@redwoodjs/fastify-web@workspace:packages/adapters/fastify/web" dependencies: "@fastify/http-proxy": "npm:9.3.0" "@fastify/static": "npm:6.12.0" "@fastify/url-data": "npm:5.4.0" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" fast-glob: "npm:3.3.2" fastify: "npm:4.25.2" tsx: "npm:4.6.2" @@ -8311,7 +8311,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/framework-tools@npm:6.0.7, @redwoodjs/framework-tools@workspace:packages/framework-tools": +"@redwoodjs/framework-tools@npm:7.0.0, @redwoodjs/framework-tools@workspace:packages/framework-tools": version: 0.0.0-use.local resolution: "@redwoodjs/framework-tools@workspace:packages/framework-tools" dependencies: @@ -8324,7 +8324,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/graphql-server@npm:6.0.7, @redwoodjs/graphql-server@workspace:packages/graphql-server": +"@redwoodjs/graphql-server@npm:7.0.0, @redwoodjs/graphql-server@workspace:packages/graphql-server": version: 0.0.0-use.local resolution: "@redwoodjs/graphql-server@workspace:packages/graphql-server" dependencies: @@ -8344,10 +8344,10 @@ __metadata: "@graphql-tools/utils": "npm:10.0.11" "@graphql-yoga/plugin-persisted-operations": "npm:3.1.1" "@opentelemetry/api": "npm:1.7.0" - "@redwoodjs/api": "npm:6.0.7" - "@redwoodjs/context": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/realtime": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" + "@redwoodjs/context": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/realtime": "npm:7.0.0" "@types/jsonwebtoken": "npm:9.0.5" "@types/lodash": "npm:4.14.201" "@types/uuid": "npm:9.0.7" @@ -8366,7 +8366,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/internal@npm:6.0.7, @redwoodjs/internal@workspace:packages/internal": +"@redwoodjs/internal@npm:7.0.0, @redwoodjs/internal@workspace:packages/internal": version: 0.0.0-use.local resolution: "@redwoodjs/internal@workspace:packages/internal" dependencies: @@ -8389,10 +8389,10 @@ __metadata: "@graphql-codegen/typescript-react-apollo": "npm:3.3.7" "@graphql-codegen/typescript-resolvers": "npm:3.2.1" "@graphql-tools/documents": "npm:1.0.0" - "@redwoodjs/babel-config": "npm:6.0.7" - "@redwoodjs/graphql-server": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/router": "npm:6.0.7" + "@redwoodjs/babel-config": "npm:7.0.0" + "@redwoodjs/graphql-server": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/router": "npm:7.0.0" "@sdl-codegen/node": "npm:0.0.10" "@types/fs-extra": "npm:11.0.4" chalk: "npm:4.1.2" @@ -8420,12 +8420,12 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/mailer-core@npm:6.0.7, @redwoodjs/mailer-core@workspace:packages/mailer/core": +"@redwoodjs/mailer-core@npm:7.0.0, @redwoodjs/mailer-core@workspace:packages/mailer/core": version: 0.0.0-use.local resolution: "@redwoodjs/mailer-core@workspace:packages/mailer/core" dependencies: - "@redwoodjs/api": "npm:6.0.7" - "@redwoodjs/framework-tools": "npm:6.0.7" + "@redwoodjs/api": "npm:7.0.0" + "@redwoodjs/framework-tools": "npm:7.0.0" tsx: "npm:4.6.2" typescript: "npm:5.3.3" vitest: "npm:1.2.2" @@ -8436,19 +8436,19 @@ __metadata: version: 
0.0.0-use.local resolution: "@redwoodjs/mailer-handler-in-memory@workspace:packages/mailer/handlers/in-memory" dependencies: - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/mailer-core": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/mailer-core": "npm:7.0.0" tsx: "npm:4.6.2" typescript: "npm:5.3.3" languageName: unknown linkType: soft -"@redwoodjs/mailer-handler-nodemailer@npm:6.0.7, @redwoodjs/mailer-handler-nodemailer@workspace:packages/mailer/handlers/nodemailer": +"@redwoodjs/mailer-handler-nodemailer@npm:7.0.0, @redwoodjs/mailer-handler-nodemailer@workspace:packages/mailer/handlers/nodemailer": version: 0.0.0-use.local resolution: "@redwoodjs/mailer-handler-nodemailer@workspace:packages/mailer/handlers/nodemailer" dependencies: - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/mailer-core": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/mailer-core": "npm:7.0.0" "@types/nodemailer": "npm:^6" nodemailer: "npm:6.9.9" tsx: "npm:4.6.2" @@ -8460,8 +8460,8 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/mailer-handler-resend@workspace:packages/mailer/handlers/resend" dependencies: - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/mailer-core": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/mailer-core": "npm:7.0.0" resend: "npm:1.1.0" tsx: "npm:4.6.2" typescript: "npm:5.3.3" @@ -8472,9 +8472,9 @@ __metadata: version: 0.0.0-use.local resolution: "@redwoodjs/mailer-handler-studio@workspace:packages/mailer/handlers/studio" dependencies: - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/mailer-core": "npm:6.0.7" - "@redwoodjs/mailer-handler-nodemailer": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/mailer-core": "npm:7.0.0" + "@redwoodjs/mailer-handler-nodemailer": "npm:7.0.0" "@types/nodemailer": "npm:^6" tsx: "npm:4.6.2" typescript: "npm:5.3.3" @@ -8486,8 +8486,8 @@ __metadata: resolution: "@redwoodjs/mailer-renderer-mjml-react@workspace:packages/mailer/renderers/mjml-react" dependencies: "@faire/mjml-react": "npm:3.3.0" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/mailer-core": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/mailer-core": "npm:7.0.0" "@types/mjml": "npm:4" mjml: "npm:4.14.1" tsx: "npm:4.6.2" @@ -8500,26 +8500,26 @@ __metadata: resolution: "@redwoodjs/mailer-renderer-react-email@workspace:packages/mailer/renderers/react-email" dependencies: "@react-email/render": "npm:0.0.10" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/mailer-core": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/mailer-core": "npm:7.0.0" tsx: "npm:4.6.2" typescript: "npm:5.3.3" languageName: unknown linkType: soft -"@redwoodjs/prerender@npm:6.0.7, @redwoodjs/prerender@workspace:packages/prerender": +"@redwoodjs/prerender@npm:7.0.0, @redwoodjs/prerender@workspace:packages/prerender": version: 0.0.0-use.local resolution: "@redwoodjs/prerender@workspace:packages/prerender" dependencies: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" - "@redwoodjs/internal": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/router": "npm:6.0.7" - "@redwoodjs/structure": "npm:6.0.7" - "@redwoodjs/web": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" + "@redwoodjs/internal": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/router": "npm:7.0.0" + "@redwoodjs/structure": "npm:7.0.0" + "@redwoodjs/web": 
"npm:7.0.0" "@types/mime-types": "npm:2.1.4" "@whatwg-node/fetch": "npm:0.9.16" babel-plugin-ignore-html-and-css-imports: "npm:0.1.0" @@ -8536,12 +8536,12 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/project-config@npm:6.0.7, @redwoodjs/project-config@workspace:packages/project-config": +"@redwoodjs/project-config@npm:7.0.0, @redwoodjs/project-config@workspace:packages/project-config": version: 0.0.0-use.local resolution: "@redwoodjs/project-config@workspace:packages/project-config" dependencies: "@iarna/toml": "npm:2.2.5" - "@redwoodjs/framework-tools": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" deepmerge: "npm:4.3.1" fast-glob: "npm:3.3.2" rimraf: "npm:5.0.5" @@ -8552,7 +8552,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/realtime@npm:6.0.7, @redwoodjs/realtime@workspace:packages/realtime": +"@redwoodjs/realtime@npm:7.0.0, @redwoodjs/realtime@workspace:packages/realtime": version: 0.0.0-use.local resolution: "@redwoodjs/realtime@workspace:packages/realtime" dependencies: @@ -8568,7 +8568,7 @@ __metadata: "@graphql-yoga/subscription": "npm:5.0.0" "@n1ru4l/graphql-live-query": "npm:0.10.0" "@n1ru4l/in-memory-live-query-store": "npm:0.10.0" - "@redwoodjs/framework-tools": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" graphql: "npm:16.8.1" ioredis: "npm:^5.3.2" jest: "npm:29.7.0" @@ -8592,28 +8592,28 @@ __metadata: "@babel/runtime-corejs3": "npm:7.23.9" "@prisma/client": "npm:5.9.1" "@prisma/internals": "npm:5.9.1" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/project-config": "npm:7.0.0" core-js: "npm:3.35.1" esbuild: "npm:0.20.0" vitest: "npm:1.2.2" languageName: unknown linkType: soft -"@redwoodjs/router@npm:6.0.7, @redwoodjs/router@workspace:packages/router": +"@redwoodjs/router@npm:7.0.0, @redwoodjs/router@workspace:packages/router": version: 0.0.0-use.local resolution: "@redwoodjs/router@workspace:packages/router" dependencies: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@types/react": "npm:^18.2.55" "@types/react-dom": "npm:^18.2.19" core-js: "npm:3.35.1" jest: "npm:29.7.0" react: "npm:0.0.0-experimental-e5205658f-20230913" react-dom: "npm:0.0.0-experimental-e5205658f-20230913" - tstyche: "npm:1.0.0-beta.9" + tstyche: "npm:1.0.0" typescript: "npm:5.3.3" peerDependencies: react: 0.0.0-experimental-e5205658f-20230913 @@ -8621,7 +8621,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/structure@npm:6.0.7, @redwoodjs/structure@workspace:packages/structure": +"@redwoodjs/structure@npm:7.0.0, @redwoodjs/structure@workspace:packages/structure": version: 0.0.0-use.local resolution: "@redwoodjs/structure@workspace:packages/structure" dependencies: @@ -8630,7 +8630,7 @@ __metadata: "@babel/runtime-corejs3": "npm:7.23.9" "@iarna/toml": "npm:2.2.5" "@prisma/internals": "npm:5.9.1" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/project-config": "npm:7.0.0" "@types/fs-extra": "npm:11.0.4" "@types/line-column": "npm:1.0.0" "@types/lodash": "npm:4.14.201" @@ -8660,15 +8660,15 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/telemetry@npm:6.0.7, @redwoodjs/telemetry@workspace:packages/telemetry": +"@redwoodjs/telemetry@npm:7.0.0, @redwoodjs/telemetry@workspace:packages/telemetry": version: 0.0.0-use.local resolution: "@redwoodjs/telemetry@workspace:packages/telemetry" dependencies: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": 
"npm:7.23.9" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/structure": "npm:6.0.7" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/structure": "npm:7.0.0" "@types/envinfo": "npm:7.8.3" "@types/uuid": "npm:9.0.7" "@types/yargs": "npm:17.0.32" @@ -8683,20 +8683,20 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/testing@npm:6.0.7, @redwoodjs/testing@workspace:packages/testing": +"@redwoodjs/testing@npm:7.0.0, @redwoodjs/testing@workspace:packages/testing": version: 0.0.0-use.local resolution: "@redwoodjs/testing@workspace:packages/testing" dependencies: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" - "@redwoodjs/babel-config": "npm:6.0.7" - "@redwoodjs/context": "npm:6.0.7" - "@redwoodjs/graphql-server": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/router": "npm:6.0.7" - "@redwoodjs/web": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" + "@redwoodjs/babel-config": "npm:7.0.0" + "@redwoodjs/context": "npm:7.0.0" + "@redwoodjs/graphql-server": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/router": "npm:7.0.0" + "@redwoodjs/web": "npm:7.0.0" "@testing-library/jest-dom": "npm:6.3.0" "@testing-library/react": "npm:14.1.2" "@testing-library/user-event": "npm:14.5.2" @@ -8719,11 +8719,11 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/tui@npm:6.0.7, @redwoodjs/tui@workspace:packages/tui": +"@redwoodjs/tui@npm:7.0.0, @redwoodjs/tui@workspace:packages/tui": version: 0.0.0-use.local resolution: "@redwoodjs/tui@workspace:packages/tui" dependencies: - "@redwoodjs/framework-tools": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" boxen: "npm:5.1.2" chalk: "npm:4.1.2" enquirer: "npm:2.4.1" @@ -8739,9 +8739,9 @@ __metadata: dependencies: "@babel/cli": "npm:7.23.9" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/internal": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" - "@redwoodjs/web": "npm:6.0.7" + "@redwoodjs/internal": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" + "@redwoodjs/web": "npm:7.0.0" "@swc/core": "npm:1.3.60" "@types/busboy": "npm:^1" "@types/cookie": "npm:^0" @@ -8777,13 +8777,13 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/web-server@npm:6.0.7, @redwoodjs/web-server@workspace:packages/web-server": +"@redwoodjs/web-server@npm:7.0.0, @redwoodjs/web-server@workspace:packages/web-server": version: 0.0.0-use.local resolution: "@redwoodjs/web-server@workspace:packages/web-server" dependencies: - "@redwoodjs/fastify-web": "npm:6.0.7" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/project-config": "npm:6.0.7" + "@redwoodjs/fastify-web": "npm:7.0.0" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/project-config": "npm:7.0.0" chalk: "npm:4.1.2" dotenv-defaults: "npm:5.0.2" fastify: "npm:4.25.2" @@ -8796,7 +8796,7 @@ __metadata: languageName: unknown linkType: soft -"@redwoodjs/web@npm:6.0.7, @redwoodjs/web@workspace:packages/web": +"@redwoodjs/web@npm:7.0.0, @redwoodjs/web@workspace:packages/web": version: 0.0.0-use.local resolution: "@redwoodjs/web@workspace:packages/web" dependencies: @@ -8805,7 +8805,7 @@ __metadata: "@babel/cli": "npm:7.23.9" "@babel/core": "npm:^7.22.20" "@babel/runtime-corejs3": "npm:7.23.9" - "@redwoodjs/auth": "npm:6.0.7" + "@redwoodjs/auth": "npm:7.0.0" "@testing-library/jest-dom": "npm:6.3.0" "@testing-library/react": "npm:14.1.2" "@types/react": "npm:^18.2.55" @@ -8822,7 +8822,7 @@ __metadata: react-hot-toast: "npm:2.4.1" 
stacktracey: "npm:2.1.8" ts-toolbelt: "npm:9.6.0" - tstyche: "npm:1.0.0-beta.9" + tstyche: "npm:1.0.0" typescript: "npm:5.3.3" peerDependencies: react: 0.0.0-experimental-e5205658f-20230913 @@ -15913,8 +15913,8 @@ __metadata: "@opentelemetry/resources": "npm:1.18.1" "@opentelemetry/sdk-trace-node": "npm:1.18.1" "@opentelemetry/semantic-conventions": "npm:1.18.1" - "@redwoodjs/framework-tools": "npm:6.0.7" - "@redwoodjs/tui": "npm:6.0.7" + "@redwoodjs/framework-tools": "npm:7.0.0" + "@redwoodjs/tui": "npm:7.0.0" "@types/babel__core": "npm:7.20.4" chalk: "npm:4.1.2" check-node-version: "npm:4.2.1" @@ -27513,27 +27513,27 @@ __metadata: languageName: node linkType: hard -"playwright-core@npm:1.41.1": - version: 1.41.1 - resolution: "playwright-core@npm:1.41.1" +"playwright-core@npm:1.41.2": + version: 1.41.2 + resolution: "playwright-core@npm:1.41.2" bin: playwright-core: cli.js - checksum: 10c0/cdd91267ca23e3f65d519100e956859c70e3e9ca29e3fe00e700b457903129e41dfa17752f1ea37ad0a8a7c6330baf9f3be503e4cbfa3e8833e80a037f899aee + checksum: 10c0/1e80a24b0e93dd5aa643fb926d23c055f2c1a0a1e711c0d798edcfd8c3e46a6716d4ca59d72ed076191e6c713d09a0f14387d96e60f5221abd4ff65aef1ac3b3 languageName: node linkType: hard -"playwright@npm:1.41.1": - version: 1.41.1 - resolution: "playwright@npm:1.41.1" +"playwright@npm:1.41.2": + version: 1.41.2 + resolution: "playwright@npm:1.41.2" dependencies: fsevents: "npm:2.3.2" - playwright-core: "npm:1.41.1" + playwright-core: "npm:1.41.2" dependenciesMeta: fsevents: optional: true bin: playwright: cli.js - checksum: 10c0/32d48c1f8ff881770a19c9245fb4191fc36b5e97ab5f48effa0b1cf5e83fa958f6fdd7e4268dd984aa306ac5fe9e4324510211910751fb52cebb9bae819d13ca + checksum: 10c0/1b487387c1bc003291a9dbd098e8e3c6a31efbb4d7a2ce4f2bf9d5e7f9fbf4a406352ab70e5266eab9a2a858bd42d8955343ea30c0286c3912e81984aa0220a3 languageName: node linkType: hard @@ -29764,7 +29764,7 @@ __metadata: "@babel/runtime-corejs3": "npm:7.23.9" "@faker-js/faker": "npm:8.0.2" "@npmcli/arborist": "npm:7.2.2" - "@playwright/test": "npm:1.41.1" + "@playwright/test": "npm:1.41.2" "@testing-library/jest-dom": "npm:6.3.0" "@testing-library/react": "npm:14.1.2" "@testing-library/user-event": "npm:14.5.2" @@ -29803,7 +29803,7 @@ __metadata: ora: "npm:7.0.1" prompts: "npm:2.4.2" rimraf: "npm:5.0.5" - tstyche: "npm:1.0.0-beta.9" + tstyche: "npm:1.0.0" tsx: "npm:4.6.2" typescript: "npm:5.3.3" vitest: "npm:1.2.2" @@ -32238,9 +32238,9 @@ __metadata: languageName: node linkType: hard -"tstyche@npm:1.0.0-beta.9": - version: 1.0.0-beta.9 - resolution: "tstyche@npm:1.0.0-beta.9" +"tstyche@npm:1.0.0": + version: 1.0.0 + resolution: "tstyche@npm:1.0.0" peerDependencies: typescript: 4.x || 5.x peerDependenciesMeta: @@ -32248,7 +32248,7 @@ __metadata: optional: true bin: tstyche: ./build/bin.js - checksum: 10c0/2682c3f7e2d83fa0af795ba14e1c83873e3f8c31f761a8af10512c3476cf824b7ef096ba9deec3fc0e12356beaf2a20abfafcb73202db4f14c7c2877db2c5a87 + checksum: 10c0/79083a3bdc0db3cd1ba4c7205b47b9c3c0bdf5ef56db13cd1d1953dc1d8b0d4d88fa125e20f42d01b4d9e6b205c2477105e9f1908dd5451e33bb27ca327ff479 languageName: node linkType: hard