diff --git a/.circleci/config.yml b/.circleci/config.yml index fedf2b4bd6090..cff7048af7b10 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -130,58 +130,6 @@ commands: - slack/status: channel: eng-react-integration-status - run-benchmark: - parameters: - working_directory: - type: string - NUM_PAGES: - type: string - BENCHMARK_CONTENT_SOURCE: - type: string - BENCHMARK_REPO_NAME: - type: string - default: gatsbyjs/gatsby - BENCHMARK_SITE_TYPE: - type: string - default: BLOG - BENCHMARK_BRANCH: - type: string - default: master - steps: - - checkout - - run: - command: npm install - working_directory: << parameters.working_directory >> - environment: - NUM_PAGES: << parameters.NUM_PAGES >> - - run: - command: npm run build - working_directory: << parameters.working_directory >> - environment: - BENCHMARK_BUILD_TYPE: COLD_START - NUM_PAGES: << parameters.NUM_PAGES >> - BENCHMARK_CONTENT_SOURCE: << parameters.BENCHMARK_CONTENT_SOURCE >> - BENCHMARK_REPO_NAME: << parameters.BENCHMARK_REPO_NAME >> - BENCHMARK_SITE_TYPE: << parameters.BENCHMARK_SITE_TYPE >> - BENCHMARK_BRANCH: << parameters.BENCHMARK_BRANCH >> - CI_NAME: circleci - - run: - command: npm install - working_directory: << parameters.working_directory >> - environment: - NUM_PAGES: << parameters.NUM_PAGES >> - - run: - command: npm run build - working_directory: << parameters.working_directory >> - environment: - BENCHMARK_BUILD_TYPE: WARM_START - NUM_PAGES: << parameters.NUM_PAGES >> - BENCHMARK_CONTENT_SOURCE: << parameters.BENCHMARK_CONTENT_SOURCE >> - BENCHMARK_REPO_NAME: << parameters.BENCHMARK_REPO_NAME >> - BENCHMARK_SITE_TYPE: << parameters.BENCHMARK_SITE_TYPE >> - BENCHMARK_BRANCH: << parameters.BENCHMARK_BRANCH >> - CI_NAME: circleci - e2e-test: parameters: skip_file_change_test: @@ -399,6 +347,19 @@ jobs: command: yarn run update-source working_directory: ~/project/scripts/i18n + sync_translation_repo: + executor: node + steps: + - checkout + - run: git config --global user.name "GatsbyJS Bot" + - run: git config --global user.email "core-team@gatsbyjs.com" + - run: + command: yarn + working_directory: ~/project/scripts/i18n + - run: + command: yarn run-all sync + working_directory: ~/project/scripts/i18n + windows_unit_tests: executor: name: win/vs2019 @@ -439,210 +400,19 @@ jobs: paths: - "*" - benchmark_markdown_id_512: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_id_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_id_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_id_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_id - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_id - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_512: - docker: - - image: "circleci/node:12" - 
steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_slug_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_slug - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_slug - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_512: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_markdown_table_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/markdown_table - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MARKDOWN - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/markdown_table - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_512: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "512" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_4096: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "4096" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_8192: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "8192" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - - benchmark_mdx_32768: - docker: - - image: "circleci/node:12" - steps: - - run-benchmark: - working_directory: benchmarks/mdx - NUM_PAGES: "32768" - BENCHMARK_CONTENT_SOURCE: MDX - BENCHMARK_REPO_NAME: gatsbyjs/gatsby/benchmarks/mdx - BENCHMARK_SITE_TYPE: BLOG - workflows: version: 2 - benchmark: - jobs: - - benchmark_markdown_id_512 - - benchmark_markdown_id_4096 - - benchmark_markdown_id_8192 - - benchmark_markdown_id_32768 - - 
benchmark_markdown_slug_512 - - benchmark_markdown_slug_4096 - - benchmark_markdown_slug_8192 - - benchmark_markdown_slug_32768 - - benchmark_markdown_table_512 - - benchmark_markdown_table_4096 - - benchmark_markdown_table_8192 - - benchmark_markdown_table_32768 - - benchmark_mdx_512 - - benchmark_mdx_4096 - - benchmark_mdx_8192 - - benchmark_mdx_32768 + weekly-i18n-sync: triggers: - schedule: - cron: 22 16 * * * + cron: "0 1 * * 6" filters: branches: only: - master + jobs: + - sync_translation_repo nightly-react-next: triggers: diff --git a/benchmarks/source-contentful/README.md b/benchmarks/source-contentful/README.md index c1b09f04c6655..25e876bbf3e91 100644 --- a/benchmarks/source-contentful/README.md +++ b/benchmarks/source-contentful/README.md @@ -7,16 +7,28 @@ Those individual article pages and the homepage share a common "Layout" componen ## Setup Contentful benchmark site -1. TODO +1. Set up the will-it-build data source 2. Copy `.env.example` to `.env.development` and make sure all variables are set 3. Run `yarn setup` Note that the script is idempotent, so you can re-run it on failures. Also use `yarn setup --skip [N:number]` to skip first `N` articles -(for example articles created during a previous run) +(for example, articles created during a previous run which failed) + +### Fixing broken images + +Sometimes Contentful silently fails to process images, which causes builds to fail. +Use the following approach to fix them: + +1. Run `yarn site find-broken-images` +2. Change the image URLs in the will-it-build dataset for this site to some other images + (or just use one of the larger sites and set `BENCHMARK_SITE_ID` appropriately) +3. Run `yarn site fix-broken-images imageid1 imageid2 imageid3` + This command updates broken images with images from the `BENCHMARK_SITE_ID` dataset. ## Build a site -1. Copy `.env.example` to `.env.production` and make sure all variables are set -2. Run `yarn build` +1. Copy `.env.example` to `.env.production` +2. Set the `BENCHMARK_CONTENTFUL_SPACE_ID` and `BENCHMARK_CONTENTFUL_ACCESS_TOKEN` variables +3. 
Run `yarn build` diff --git a/benchmarks/source-contentful/bin/setup.js b/benchmarks/source-contentful/bin/site.js similarity index 63% rename from benchmarks/source-contentful/bin/setup.js rename to benchmarks/source-contentful/bin/site.js index e53e762865da5..633179c15a97b 100644 --- a/benchmarks/source-contentful/bin/setup.js +++ b/benchmarks/source-contentful/bin/site.js @@ -2,6 +2,7 @@ const fs = require(`fs`) const path = require(`path`) const contentful = require(`contentful-management`) const chalk = require("chalk") +const yargs = require("yargs") require("dotenv").config({ path: `.env.${process.env.NODE_ENV}`, @@ -29,12 +30,43 @@ const contentfulConfig = { const { spaceId, managementToken } = contentfulConfig if (!spaceId || !managementToken) { - console.error( - `Contentful space id and management API token are required.` - ) + console.error(`Contentful space id and management API token are required.`) process.exit(1) } +yargs + .scriptName("site") + .usage("$0 [arguments]") + .command({ + command: `setup [--skip=number]`, + desc: `Setup new Contentful Benchmark Site from the dataset`, + builder: yargs => + yargs.option(`skip`, { + type: `number`, + default: 0, + description: `Skip this number of entries`, + }), + handler: ({ skip = 0 }) => { + runSetup({ skip }).catch(console.error) + }, + }) + .command({ + command: "find-broken-images", + desc: `Find broken images in current contentful site`, + handler: () => { + runFindBrokenImages().catch(console.error) + }, + }) + .command({ + command: "fix-broken-images ", + desc: `Fix images found by find-broken-images`, + handler: ({ ids }) => { + runFixBrokenImages(ids).catch(console.error) + }, + }) + .demandCommand(1) + .help().argv + /** * Transforms an article from source dataset to contentful data model */ @@ -201,16 +233,7 @@ async function createArticle(env, articleData) { } } -const resolveSkip = () => { - const index = process.argv.findIndex(param => param === `--skip`) - if (index >= 0) { - const skipValue = process.argv[index + 1] - return Number(skipValue) || 0 - } - return 0 -} - -async function createEntries(env) { +async function createEntries({ env, skip = 0 }) { const processBatch = sourceArticles => Promise.all( sourceArticles.map(async sourceArticle => { @@ -219,14 +242,12 @@ async function createEntries(env) { const articleCreated = await createArticle(env, article) console.log( `Processed ${chalk.green(article.sys.id)} (` + - `asset ${assetCreated ? `created` : chalk.yellow(`exists`)}, ` + - `article ${articleCreated ? `created` : chalk.yellow(`exists`)})` + `asset ${assetCreated ? `created` : chalk.yellow(`exists`)}, ` + + `article ${articleCreated ? 
`created` : chalk.yellow(`exists`)})` ) }) ) - const skip = resolveSkip() - if (skip) { console.log(`Skipping first ${chalk.yellow(skip)} articles`) } @@ -248,7 +269,45 @@ async function createEntries(env) { } } -async function run() { +async function updateAssets({ env, assetIds }) { + for await (const sourceArticle of readSourceArticles(inputDir)) { + const { asset: assetData } = extractEntities(sourceArticle) + if (assetIds.has(assetData.sys.id)) { + try { + let asset = await env.getAsset(assetData.sys.id) + try { + asset = await asset.unpublish() + } catch (e) {} + asset = await asset.delete() + asset = await createAsset(env, assetData) + console.log(`Updated asset ${chalk.yellow(assetData.sys.id)}`) + } catch (e) { + console.warn(`Could not update asset ${chalk.yellow(assetData.sys.id)}`) + console.log(e) + } + } + } +} + +async function findBrokenImages(env) { + let assets + let skip = 0 + let ids = [] + + do { + assets = await env.getAssets({ skip }) + for (let asset of assets.items) { + const details = asset.fields.file[`en-US`].details + if (!details || !details.image || !details.image.width) { + ids.push(asset.sys.id) + } + } + skip = assets.skip + assets.limit + } while (assets && assets.items.length > 0) + return ids +} + +async function createClient() { const client = contentful.createClient({ accessToken: contentfulConfig.managementToken, }) @@ -256,20 +315,41 @@ async function run() { const space = await client.getSpace(contentfulConfig.spaceId) const env = await space.getEnvironment(`master`) - // Create content model only: - createContentModel(env) - .then(() => { - console.log(`Content model ${chalk.green(`created`)}`) - }) - .then(() => createEntries(env)) - .then(() => { - console.log( - `All set! You can now run ${chalk.yellow( - "gatsby develop" - )} to see the site in action.` - ) - }) - .catch(error => console.error(error)) + return { client, space, env } +} + +async function runSetup({ skip }) { + const { env } = await createClient() + + await createContentModel(env) + console.log(`Content model ${chalk.green(`created`)}`) + await createEntries({ env, skip }) + + console.log( + `All set! You can now run ${chalk.yellow( + "gatsby develop" + )} to see the site in action.` + ) +} + +async function runFindBrokenImages() { + const { env } = await createClient() + const ids = await findBrokenImages(env) + if (ids.length) { + console.log(chalk.yellow(`Broken images:`)) + console.log(ids.join(` `)) + console.log(``) + } else { + console.log(chalk.green(`No broken images!`)) + } } -run().catch(console.error) +async function runFixBrokenImages(ids) { + if (!ids.length) { + console.log(`Nothing to do: no broken images!`) + return + } + const { env } = await createClient() + console.log(`Fixing ${chalk.yellow(ids.length)} broken images`) + await updateAssets({ env, assetIds: new Set(ids) }) +} diff --git a/benchmarks/source-contentful/package.json b/benchmarks/source-contentful/package.json index b52d69b4da110..fe26a7a92cb4e 100644 --- a/benchmarks/source-contentful/package.json +++ b/benchmarks/source-contentful/package.json @@ -10,7 +10,8 @@ "develop": "gatsby develop", "format": "prettier --write \"**/*.{js,jsx,json,md}\"", "serve": "gatsby serve", - "setup": "cross-env NODE_ENV=development node bin/setup.js", + "site": "cross-env NODE_ENV=development node bin/site.js", + "setup": "cross-env NODE_ENV=development node bin/site.js setup", "start": "npm run develop", "test": "echo \"Write tests! 
-> https://gatsby.dev/unit-testing\" && exit 1" }, @@ -31,7 +32,8 @@ "chalk": "^2.4.2", "cross-env": "^7.0.0", "gatsby-plugin-benchmark-reporting": "*", - "prettier": "^1.19.1" + "prettier": "^1.19.1", + "yargs": "^15.3.1" }, "repository": { "type": "git", diff --git a/benchmarks/source-wordpress/gatsby-config.js b/benchmarks/source-wordpress/gatsby-config.js index 525ca36d03827..18756833d2749 100644 --- a/benchmarks/source-wordpress/gatsby-config.js +++ b/benchmarks/source-wordpress/gatsby-config.js @@ -1,5 +1,5 @@ require("dotenv").config({ - path: `.env`, + path: `.env.${process.env.NODE_ENV}`, }) module.exports = { diff --git a/benchmarks/source-wordpress/package.json b/benchmarks/source-wordpress/package.json index 6a539b989e503..49777d30c3245 100644 --- a/benchmarks/source-wordpress/package.json +++ b/benchmarks/source-wordpress/package.json @@ -12,18 +12,14 @@ "serve": "gatsby serve", "start": "npm run develop" }, - "resolutions": { - "sharp": "0.25.1" - }, "dependencies": { "dotenv": "^8.2.0", "gatsby": "^2.19.35", "gatsby-image": "^2.2.40", "gatsby-plugin-sharp": "^2.4.5", "gatsby-source-filesystem": "^2.1.48", - "gatsby-source-wordpress-experimental": "^0.0.15", + "gatsby-source-wordpress-experimental": "^0.0.29", "gatsby-transformer-sharp": "^2.3.14", - "lodash.kebabcase": "^4.1.1", "react": "^16.12.0", "react-dom": "^16.12.0" }, diff --git a/docs/blog/100days/comments/index.md b/docs/blog/100days/comments/index.md index 7ed004db19959..4241969707482 100644 --- a/docs/blog/100days/comments/index.md +++ b/docs/blog/100days/comments/index.md @@ -22,9 +22,9 @@ There are multiple ways to accept user-generated content on your site. So, inste #### Option 1. Embed comments using JavaScript. -_Pros and cons_: This approach is the simplest option for adding comments to your site. You can choose many low cost or free comment services, like Discus and Facebook Comments. However, this method will have a negative impact on your site performance, and has privacy implications for your users. +_Pros and cons_: This approach is the simplest option for adding comments to your site. You can choose many low cost or free comment services, like Disqus and Facebook Comments. However, this method will have a negative impact on your site performance, and has privacy implications for your users. -_Tutorial_: [Using Javascript-based comment service, Discus with Gatsby](/docs/adding-comments/#using-disqus-for-comments) +_Tutorial_: [Using Javascript-based comment service, Disqus with Gatsby](/docs/adding-comments/#using-disqus-for-comments) #### Option 2. Add comments to your content diff --git a/docs/blog/100days/create-themes/index.md b/docs/blog/100days/create-themes/index.md new file mode 100644 index 0000000000000..95c8564b91615 --- /dev/null +++ b/docs/blog/100days/create-themes/index.md @@ -0,0 +1,29 @@ +--- +title: Challenge 15 - Publish Website Kits with Gatsby Themes +date: 2020-04-07 +author: "Hashim Warren" +excerpt: "Learn how to create a Gatsby Theme" +tags: ["learning-to-code", "100-Days-of-Gatsby"] +--- + +_Gatsby was named the [#1 new technology to learn in 2020](https://www.cnbc.com/2019/12/02/10-hottest-tech-skills-that-could-pay-off-most-in-2020-says-new-report.html)!_ + +_To help you learn about Gatsby and our ecosystem, we invite you to take the #100DaysOfGatsby challenge! If you are new to Gatsby, you can follow along with our beginner-friendly weekly prompts. 
Or if you are familiar with Gatsby and want to go deeper, we invite you to [build your first Gatsby Theme](/docs/themes/building-themes/)._ + +_Learn more about [#100DaysOfGatsby here](/blog/100days)!_ + +## Challenge 15: Encapsulate and Share Your Work with a Gatsby Theme + +This week you'll learn how to create a Gatsby Theme! Themes are a neat way to wrap your site development work into a package that can be used by others on your team, or the open source community. + +The resources below will help you to create your first theme, and learn from themes that the Gatsby community has published. + +### Gatsby Theme Creation Resources + +Start with this free [video course on authoring Gatsby Themes](https://egghead.io/courses/gatsby-theme-authoring). While you watch the videos, use this [written Gatsby Theme guide](/tutorial/building-a-theme/), which is meant as a companion to the course. + +For inspiration, investigate the repos of the projects [that were showcased during Theme Jam](https://themejam.gatsbyjs.org/showcase), our hackathon for Gatsby Themes. + +### What to Do If You Need Help + +If you get stuck during the challenge, you can ask for help from the [Gatsby community](/contributing/community/) and the [ASKGatsbyJS](https://twitter.com/AskGatsbyJS) Twitter account. You can find fellow Gatsby Developers on [Discord](https://discordapp.com/invite/gatsby), [Reddit](https://www.reddit.com/r/gatsbyjs/), [Spectrum](https://spectrum.chat/gatsby-js), and [Dev](https://dev.to/t/gatsby). diff --git a/docs/blog/2018-04-10-how-to-handle-comments-in-gatsby-blogs/index.md b/docs/blog/2018-04-10-how-to-handle-comments-in-gatsby-blogs/index.md index 362adc97b5b40..962d68b66bc29 100644 --- a/docs/blog/2018-04-10-how-to-handle-comments-in-gatsby-blogs/index.md +++ b/docs/blog/2018-04-10-how-to-handle-comments-in-gatsby-blogs/index.md @@ -30,13 +30,13 @@ Putting the comments inside Git is awesome. Your content, your site, no external ### Staticman -But how do you get them there? Enter [staticman](https://staticman.net/). You give staticman access to your GitHub repo. You create a form. Staticman creates a pull request. It also supports akismet spam filtering. Genius. Oh, and it's free. Yes, doubly fantastic. +But how do you get them there? Enter [Staticman](https://staticman.net/). You give Staticman access to your GitHub repo. You create a form. Staticman creates a pull request. It also supports akismet spam filtering. Genius. Oh, and it's free. Yes, doubly fantastic. Staticman is open source. You can run your own instance. If you really want to. It's probably not worth it. Their service is free. They don't "own" your data. If they disappear, your comments live on. ### Roll your own -You could create your own staticman alternative. You could use a framework like [serverless](https://serverless.com/). You can find free hosts for serverless. You can write your own custom anti spam logic. This will be hard. It will probably be fun. It's not a very good idea! +You could create your own Staticman alternative. You could use a framework like [serverless](https://serverless.com/). You can find free hosts for serverless. You can write your own custom anti spam logic. This will be hard. It will probably be fun. It's not a very good idea! ### Netlify @@ -56,6 +56,6 @@ The possibilities are limitless. ## Conclusion -Use [staticman](https://staticman.net/). It's the best all round option. Keep control of your content. If their service goes down, you keep your comments. 
You can run your own instance later if necessary. It includes support for akismet spam filtering. +Use [Staticman](https://staticman.net/). It's the best all round option. Keep control of your content. If their service goes down, you keep your comments. You can run your own instance later if necessary. It includes support for akismet spam filtering. -**To see staticman in action, leave a comment at [Gatsby Central](https://www.gatsbycentral.com)!** +**To see Staticman in action, leave a comment at [Gatsby Central](https://www.gatsbycentral.com)!** diff --git a/docs/blog/2020-02-19-how-to-build-multilingual-sites-with-gatsby/index.md b/docs/blog/2020-02-19-how-to-build-multilingual-sites-with-gatsby/index.md index bb468d79b35ea..5125cf06ce626 100644 --- a/docs/blog/2020-02-19-how-to-build-multilingual-sites-with-gatsby/index.md +++ b/docs/blog/2020-02-19-how-to-build-multilingual-sites-with-gatsby/index.md @@ -15,7 +15,7 @@ The first part of the article will be CMS agnostic. For the basics, it doesn't m ## ISO Codes or Codenames -When you work with multiple languages, each content piece needs to be flagged with a language. Languages like English and German make this straightforward, you can use their ISO codes as flags. But sometimes you need to distinguish between countries that speak the same language, such as Portugal and Brazil. We also have ISO codes for countries, and together with their language they form a language code (en-US, cs-CZ, and so on). Many CMSs use that to identify content for that specific part of the world. It's important to be able to extend this list, though. +When you work with multiple languages, each content piece needs to be flagged with a language. Languages like English and German make this straightforward, you can use their ISO codes as flags. But sometimes you need to distinguish between countries that speak the same language, such as Portugal and Brazil. We also have ISO codes for countries, and together with their language they form a language code (`en-US`, `cs-CZ`, and so on). Many CMSs use that to identify content for that specific part of the world. It's important to be able to extend this list, though. Why? @@ -61,27 +61,27 @@ To illustrate these steps, I used a sample intranet app that was built on Gatsby The app contains a list of employees and a profile page for every one of them. -Before we jump into code, it's essential to mention plugins. They greatly help with handling multilingual websites in Gatsby, so it would be a waste of time to try and reinvent the wheel here. For the full list, see [Gatsby docs](https://www.gatsbyjs.org/docs/localization-i18n/). I chose the [gatsby-plugin-i18n](https://github.com/angeloocana/gatsby-plugin-i18n) as it is capable of automatic locale identification, handles language-specific static pages, and lets me configure the basics. +Before we jump into code, it's essential to mention plugins. They greatly help with handling multilingual websites in Gatsby, so it would be a waste of time to try and reinvent the wheel here. For the full list, see [Gatsby docs](/docs/localization-i18n/). I chose the [gatsby-plugin-i18n](https://github.com/angeloocana/gatsby-plugin-i18n) as it is capable of automatic locale identification, handles language-specific static pages, and lets me configure the basics. 
```js:title=gatsby-config.js { -  resolve: `gatsby-plugin-i18n`, -  options: { -  langKeyDefault: 'en', -  langKeyForNull: 'en', -  prefixDefault: false, -  useLangKeyLayout: false, -  }, - }, + resolve: `gatsby-plugin-i18n`, +  options: { +  langKeyDefault: 'en', +  langKeyForNull: 'en', +  prefixDefault: false, +  useLangKeyLayout: false, +  }, +}, ``` -This configuration (in `gatsby-config.js`) tells the plugin to use 'en' as the default language code (`langKeyDefault`, `langKeyForNull`) and no prefix (`prefixDefault`). The last option (`useLangKeyLayout`) specifies that the used layout is language invariant. +This configuration (in `gatsby-config.js`) tells the plugin to use `'en'` as the default language code (`langKeyDefault`, `langKeyForNull`) and no prefix (`prefixDefault`). The last option (`useLangKeyLayout`) specifies that the used layout is language invariant. ### Localize URLs Right at the beginning, we need to think about URLs. Do you want the language code in the URL all the time? Should the default language be accessible without language code? -```js +```text ╔════════════════════════════════════════════════════╗ ║ All languages use language code in URL ║ ╠════════════════════════════════════════════════════╣ @@ -115,54 +115,58 @@ If your website is not Google Maps, people most likely won't share their locatio As I mentioned already, the intranet app contains profile pages for all employees. They are generated dynamically because, well, the list of employees is also dynamic. This code piece that sits in `gatsby-node.js`'s `createPages` generates pages in the original implementation: -```js -query peoplePortalList { +```jsx +graphql(` + query peoplePortalList { allKontentItemPerson() { - nodes { - elements { - urlslug { - value - } - } - } - } - } - ... - for (const person of nodes) { - createPage({ - path: `employees/${person.elements.urlslug.value}`, - component: path.resolve(`./src/templates/person.js`), - context: { - slug: person.elements.urlslug.value, - }, - }); - } + nodes { + elements { + urlslug { + value + } + } + } + } + } +`) + +// ... + +for (const person of nodes) { + createPage({ + path: `employees/${person.elements.urlslug.value}`, + component: path.resolve(`./src/templates/person.js`), + context: { + slug: person.elements.urlslug.value, + }, + }) +} ``` We will need to adjust both the GraphQL query and the code that generates pages. -During the build time, the [Kontent source plugin](https://www.gatsbyjs.org/docs/sourcing-from-kentico-kontent/) generates one Gatsby node per each content item - language combination. To distinguish these nodes, they always contain two fields: +During the build time, the [Kontent source plugin](/docs/sourcing-from-kentico-kontent/) generates one Gatsby node per each content item - language combination. To distinguish these nodes, they always contain two fields: - **preferred_language** This field is mainly used for filtering and describes the language the item is intended for. For example, if you want Czech content, you want to filter for `preferred_language='cs'` provided you use the codename `'cs'` for Czech. - **system.language** - This is the actual content item language. If you filter items based on `preferred_language='cs'`, you will get `system.language='cs'` if the item is translated. Otherwise, the item content will be in English, and `system.language` will be 'default' (here, it's English). + This is the actual content item language. 
If you filter items based on `preferred_language='cs'`, you will get `system.language='cs'` if the item is translated. Otherwise, the item content will be in English, and `system.language` will be `'default'` (here, it's English). In my case, I am happy with language fallbacks for items that are not translated. That means I can use `preferred_language` and treat all items as if they were translated. -```js +```graphql query PeoplePortalList { - allKontentItemPerson() { - nodes { - elements { - urlslug { - value - } - }, - preferred_language + allKontentItemPerson() { + nodes { + elements { + urlslug { + value } + }, + preferred_language } + } } ``` @@ -171,30 +175,38 @@ If you don't want to display items that fallback to parent language, just compar Let's define here a new variable `lang` that will hold the language code for the current item's `preferred_language`. I will use it in the `createPage` method call to place the newly generated page on the right URL. ```js -let lang = `${person.preferred_language}/` -if (person.preferred_language === "default") { - lang = "/" +for (const person of nodes) { + let lang = `${person.preferred_language}/` + if (person.preferred_language === "default") { + lang = "/" + } + createPage({ + path: `${lang}employees/${person.elements.urlslug.value}`, + component: path.resolve(`./src/templates/person.js`), + context: { + slug: person.elements.urlslug.value, + lang: person.preferred_language, + }, + }) } -createPage({ - path: `${lang}employees/${person.elements.urlslug.value}`, - component: path.resolve(`./src/templates/person.js`), - context: { - slug: person.elements.urlslug.value, - lang: person.preferred_language, - }, -}) ``` ### Generate Language-specific Static Pages Apart from dynamic pages, there are always some static pages. They include `index.js` and `employees.js` that handle the homepage and employees page respectively. The `gatsby-plugin-i18n` will place them on the right language-specific URLs if you follow the defined language convention-the filename suffix needs to contain the language code. -- index.js -> index.en.js, index.cs.js -- employees.js -> employees.en.js, employees.cs.js +- `index.js` -> + + - `index.en.js` + - `index.cs.js` + +- `employees.js` -> + - `employees.en.js` + - `employees.cs.js` It's also necessary to adjust the content of each of the new files to reflect its new language. That includes component properties. Take a look at this part of my index.js: -```js:title=index.js +```jsx:title=index.js @@ -202,7 +214,7 @@ It's also necessary to adjust the content of each of the new files to reflect it Once this file becomes `index.cs.js`, I need to adjust it to: -```js:title=index.cs.js +```jsx:title=index.cs.js @@ -216,11 +228,11 @@ There are multiple ways to handle the language propagation. They are ranging fro I always aim to keep things simple. In my case, the language travels through components from top to bottom. The language-specific `index` page defines the language code for the `IndexContent` component. If a child component requires the current language, it will receive it from its parent the same way. -```js +```jsx function Content({ classes, lang }) { - ... - - ... + ... + + ... } ``` @@ -228,7 +240,7 @@ The last part of this multilingual adjustment tutorial is the language selector. 
A very simple implementation featuring just two languages (Czech and English) can look like this: -```js +```jsx import PropTypes from "prop-types" import React, { Component } from "react" import { Location } from "@reach/router" @@ -265,4 +277,4 @@ You see that adding multiple languages to sites is not as simple as it seems at Have you published a multilingual website on Gatsby? Let me know how you implemented it and what your experience was like on [Twitter](https://twitter.com/ondrabus). -You can also check out [this implementation on GitHub in a separate branch of the forked source repository](https://github.com/ondrabus/kontent-sample-app-gatsby-intranet/tree/multilingual) or the [Kontent source plugin page](https://www.gatsbyjs.org/docs/sourcing-from-kentico-kontent/) in the docs. +You can also check out [this implementation on GitHub in a separate branch of the forked source repository](https://github.com/ondrabus/kontent-sample-app-gatsby-intranet/tree/multilingual) or the [Kontent source plugin page](/docs/sourcing-from-kentico-kontent/) in the docs. diff --git a/docs/blog/2020-03-26-service-relief-project/index.md b/docs/blog/2020-03-26-service-relief-project/index.md index fdabc87dc8794..3bcbc88304732 100644 --- a/docs/blog/2020-03-26-service-relief-project/index.md +++ b/docs/blog/2020-03-26-service-relief-project/index.md @@ -20,13 +20,13 @@ Wonderfully, though, fundraisers are popping up all over the place to provide so **In this post:** -- [About the project](#about) -- [How it all began](#beginnings) -- [Assembling a team](#team) -- [Building a prototype](#prototype) -- [Get Involved](#help) +- [About the project](#about-the-project) +- [How it all began](#how-it-all-began) +- [Assembling a team](#assembling-a-team) +- [Building a prototype](#building-a-prototype) +- [How to Get Involved](#how-to-get-involved) -### About the project +### About the project The Service Relief Project is a boilerplate for launching blazing-fast zero-cost directories of local fundraisers to help your local businesses affected by mandated shutdowns around the world. @@ -59,7 +59,7 @@ Here are a few of the sites that have already launched using this project: - [Sioux Falls Service Relief](https://siouxfallsservicerelief.com/) - [Asheville Service Relief](https://ashevilleservicerelief.com/) -### How it all began +### How it all began On Monday, March 16th, Gatsby made the decision to close down for two days. Even though we're a completely remote company, we chose to do this to give our team members the time they needed to care for their families and adjust to the new world we found ourselves in. @@ -75,7 +75,7 @@ That's where Gatsby came in. For the Seattle Service Relief site, I generated a That was Monday. All day Tuesday I kept pondering the Seattle Service Relief project and wondering if there might be something bigger that we could do with the website. Though a palpable feeling of uncertainty permeated any and all conversations with my fellow Gatsby-ites, there was also a chord strumming just loudly and universally with the desire to do something, anything to help as a company. 
-### Assembling a team +### Assembling a team I am grateful to be surrounded by coworkers that are deeply passionate about helping others and, after a quick brainstorm with [Dustin Schau](https://twitter.com/SchauDustin) (Head of Product, Gatsby), I decided to float the idea in our **#random** Slack channel at work to see if anyone else was interested in helping out: @@ -89,7 +89,7 @@ The response I recieved was incredible. Folks from every team at Gatsby chimed i Inspired by the support, I built -### Building a prototype +### Building a prototype At 6:30pm CST on March 18th, 8 members of the Gatsby team jumped on a Zoom call after-hours to discuss how we could help make this project a reality. @@ -117,7 +117,7 @@ In our off-hours over the next 5 days, **6 team members made 76 commits, merged Within 48 hours of launching the Service Relief Project site and starter, local fundraising information sites have been launched in _eleven_ more states beyond the initial Seattle site! From Provo, Utah, to Sioux Falls, South Dakota and from Austin, Texas to Asheville, North Carolina - and half a dozen more places in between - people have been able to use our project to quickly launch their own service worker relief sites. -### How to Get Involved +### How to Get Involved What started as a project to help a friend help his community has turned into a worldwide project to help small businesses affected by the mandated shutdowns around the world. diff --git a/docs/blog/2020-04-01-LA-2020-Mathews/index.md b/docs/blog/2020-04-01-LA-2020-Mathews/index.md index 0fb9e392be058..fce94ccc988a4 100644 --- a/docs/blog/2020-04-01-LA-2020-Mathews/index.md +++ b/docs/blog/2020-04-01-LA-2020-Mathews/index.md @@ -11,7 +11,7 @@ tags: - community --- -_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool, joining us at our next Gatsby Days—currently scheduled for October 19th in Amsterdam—would also be pretty awesome)._ +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ As the co-founder and CEO of Gatsby, Kyle Mathews kicked off Gatsby Days LA 2020 with a keynote address that spanned Gatsby’s past, present, and future. Learn what sparked Kyle and co-founder Sam Bhagwat to develop a framework that could combine the best of static-site generation, content management system (CMS), and React technologies. Next, hear how Gatsby today is helping produce amazing developer experiences for building ridiculously fast websites. 
diff --git a/docs/blog/2020-04-02-LA-2020-Schau/index.md b/docs/blog/2020-04-02-LA-2020-Schau/index.md index ee7132188c16b..71b1a2dc2c807 100644 --- a/docs/blog/2020-04-02-LA-2020-Schau/index.md +++ b/docs/blog/2020-04-02-LA-2020-Schau/index.md @@ -12,9 +12,9 @@ tags: - gatsby-cloud --- -_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool, joining us at our next Gatsby Days—currently scheduled for October 19th in Amsterdam—would also be pretty awesome)._ +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ -Dustin Shau, Gatsby's Head of Product, is a dedicated open source developer. He was first drawn to the framework by Gatsby’s exceptional performance and outstanding developer experience. Now a Gatsby team member, Dustin is focused on making Gatsby the fastest, most inclusive platform for building websites and web applications. +Dustin Schau, Gatsby's Head of Product, is a dedicated open source developer. He was first drawn to the framework by Gatsby’s exceptional performance and outstanding developer experience. Now a Gatsby team member, Dustin is focused on making Gatsby the fastest, most inclusive platform for building websites and web applications. Watch Dustin demonstrate the impressive build-time performance of Gatsby and Gatsby Cloud by walking through typical developer workflows, including updating a website using Contentful's headless CMS and then rapidly deploying the results through a content delivery network (CDN). And discover how Gatsby enables developers to quickly incorporate accessibility capabilities—such as a SkipNav function—to deliver inclusive web experiences to the widest possible audience. diff --git a/docs/blog/2020-04-05-LA-2020-Gladwell/index.md b/docs/blog/2020-04-05-LA-2020-Gladwell/index.md new file mode 100644 index 0000000000000..667ffa074c8b1 --- /dev/null +++ b/docs/blog/2020-04-05-LA-2020-Gladwell/index.md @@ -0,0 +1,20 @@ +--- +title: "Gatsby Days LA 2020 Video 3: Slash Build Times with Gatsby Builds Best Practices" +date: 2020-04-05 +author: Greg Thomas +excerpt: "React developer Grant Glidewell tells how he identified best practices for using Gatsby Builds to cut build time for image-heavy websites by a factor of five." +tags: + - gatsby-days + - community + - performance + - drupal + - gatsby-cloud +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! 
(Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Grant Glidewell is a React developer dedicated to employing modern technologies in cutting-edge projects. During his time at the digital experience agency Third and Grove, Grant was part of the team that integrated Drupal and Gatsby to create the agency’s fast, yet content-rich website. He also helped build Gatsby Preview + Drupal—a live preview module for Drupal that can be used with Gatsby Cloud. + +At Gatsby Days LA 2020, Grant focused on performance. After analyzing top Gatsby-based sites, he and his team identified best practices that can help developers achieve blazing fast speeds. Check out this video of Grant’s presentation to discover how Gatsby Builds helped Third and Grove reduce build times for its image-heavy site from about 40 minutes to only 8. And learn how Gatsby’s lazy load components can deliver impressive performance benefits without excessive coding. + +[![Gatsby Days LA Video 3: What separates the fastest Gatsby sites from everybody else with Grant Glidewell](https://res.cloudinary.com/marcomontalbano/image/upload/v1585858632/video_to_markdown/images/youtube--xMorT50I0cw-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=xMorT50I0cw "Gatsby Days LA Video 3: What separates the fastest Gatsby sites from everybody else with Grant Glidewell") diff --git a/docs/blog/2020-04-06-LA-2020-Gordon/index.md b/docs/blog/2020-04-06-LA-2020-Gordon/index.md new file mode 100644 index 0000000000000..fc8f13eb7b286 --- /dev/null +++ b/docs/blog/2020-04-06-LA-2020-Gordon/index.md @@ -0,0 +1,20 @@ +--- +title: "Gatsby Days LA 2020 Video 4: Methodologies for Building Highly Dynamic WordPress Sites Using Gatsby" +date: 2020-04-06 +author: Greg Thomas +excerpt: "What are the best options for integrating comments and other dynamic elements into your WordPress website? Zac Gordon evaluates key approaches." +tags: + - gatsby-days + - community + - performance + - drupal + - gatsby-cloud +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Zac Gordon is a community builder at [Strattic](https://www.strattic.com/), the serverless WordPress host, and an educator who has taught courses on building with JavaScript and Gatsby in WordPress for high schools, colleges, bootcamps, and online learning sites. He is particularly excited to discuss ways the evolving [content mesh](https://www.gatsbyjs.org/blog/2018-10-04-journey-to-the-content-mesh/) is pushing WordPress and the web forward by enabling developers to capitalize on best-of-breed microservices. + +At Gatsby Days LA 2020, Zac explored developer options for building highly dynamic, rich WordPress sites. 
First focusing on ways to incorporate comments into sites, Zac examined native, custom coding, and SaaS approaches. Learn the pros and cons of each option and discover what similar options exist for implementing additional dynamic elements, including forms, memberships, and e-commerce capabilities to websites. + +[![Gatsby Days LA Video 4: Using Gatsby to Build Highly Dynamic WordPress Sites, with Zac Gordon](https://res.cloudinary.com/marcomontalbano/image/upload/v1585860921/video_to_markdown/images/youtube--O22FNh8XXT8-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=O22FNh8XXT8 "Gatsby Days LA Video 4: Using Gatsby to Build Highly Dynamic WordPress Sites, with Zac Gordon") diff --git a/docs/blog/2020-04-07-LA-2020-Boss/index.md b/docs/blog/2020-04-07-LA-2020-Boss/index.md new file mode 100644 index 0000000000000..31920ba6c0659 --- /dev/null +++ b/docs/blog/2020-04-07-LA-2020-Boss/index.md @@ -0,0 +1,21 @@ +--- +title: "Gatsby Days LA 2020 Video 5: How Tinder Used Gatsby to Build a WordPress Site in React (Instead of PHP)" +date: 2020-04-07 +author: Greg Thomas +excerpt: "Kyle Boss, web developer at Tinder, explains how Gatsby enables developers to set up and use React as a front end for WordPress—avoiding the use of PHP almost entirely." +tags: + - gatsby-days + - wordpress + - react + - blogs + - case-studies + - serverless +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Kyle Boss is a Tinder web developer who was presented with a daunting challenge on his very first day at the dating app company: build a new WordPress blog site that can help people navigate online/browser and app-based dating. The problem? Kyle is a React developer and wasn’t eager to use PHP for this new site 🙀. Fortunately, he learned that Tinder had already decided to use Gatsby as a bridge between WordPress and React. + +In his presentation at Gatsby Days LA 2020, Kyle explains how Gatsby enables developers to set up and use React as a front end for WordPress—avoiding the use of PHP almost entirely. Discover how this approach empowered content creators to benefit from the ease of using WordPress while empowering developers to work with their preferred tools. And learn how Tinder’s use of Gatsby also helped the company avoid the dreaded “succulent apocalypse” of 2019. 
+ +[![Gatsby Days LA Video 5: How Tinder Used Gatsby to Build a WordPress Site in React (Instead of PHP)](https://res.cloudinary.com/marcomontalbano/image/upload/v1586206693/video_to_markdown/images/youtube--9SrHm3cWk9g-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=9SrHm3cWk9g "Gatsby Days LA Video 5: How Tinder Used Gatsby to Build a WordPress Site in React (Instead of PHP)") diff --git a/docs/blog/2020-04-07-virtual-gatsby-days-2020-CFP/index.md b/docs/blog/2020-04-07-virtual-gatsby-days-2020-CFP/index.md new file mode 100644 index 0000000000000..5b7a42c38bdbc --- /dev/null +++ b/docs/blog/2020-04-07-virtual-gatsby-days-2020-CFP/index.md @@ -0,0 +1,18 @@ +--- +title: "Gatsby Days Virtual Edition: Call for Proposals is Open!" +date: 2020-04-07 +author: Laci Texter +excerpt: "Our first virtual Gatsby Days is coming June 2-3, 2020. It will be live-streamed on YouTube from 1:00-3:00pm EST each day, and now is the time to send in your speaker proposal!" +tags: + - gatsby-days + - community + - gatsby-cloud +--- + +Whether we gather in person or online, Gatsby Days conferences are all about the collective experiences and perspectives of the amazing Gatsby community 💜. + +The next Gatsby Days will take place on June 2-3, 2020, from 9:00am-12:00pm PST each day. This is a virtual event and will be live-streamed on YouTube. This means all speakers will need a computer, decent internet connection, and some kind of microphone/camera setup (the camera in your computer is great!) All speakers will receive a \$500.00 stipend from Gatsby following the conference. + +Please make sure to [submit your proposal](https://docs.google.com/forms/d/e/1FAIpQLSfjUpqpmRL18ydo_PmC4jxvPG8xhOlix43KeRHOhUbPp3u7Mw/viewform?usp=sf_link) NO LATER THAN Friday, April 24th, 2020 at 11:59pm PST. We are reviewing submissions as they come in, so submitting early is an advantage. + +Learn more about Virtual Gatsby Days and previous Gatsby Days events on our website: https://www.gatsbyjs.com/resources/gatsby-days diff --git a/docs/blog/2020-04-08-LA-2020-Harmon/index.md b/docs/blog/2020-04-08-LA-2020-Harmon/index.md new file mode 100644 index 0000000000000..c528ce6d91d32 --- /dev/null +++ b/docs/blog/2020-04-08-LA-2020-Harmon/index.md @@ -0,0 +1,21 @@ +--- +title: "Gatsby Days LA 2020 Video 6: Set Up and Sell Things Fast with Gatsby + Shopify" +date: 2020-04-08 +author: Greg Thomas +excerpt: "Trevor Harmon from Shopify gives a step-by-step walkthrough for a sample site, showing how to set up a Shopify store, then pull data into Gatsby and use it to create pages." +tags: + - gatsby-days + - ecommerce + - shopify + - plug-ins +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Speed is everything in e-commerce. A single extra second of load time can cost an online retailer 7% in lost sales every year. To keep customers engaged and revenues flowing, you need to ensure your site is fast. 
+ +Trevor Harmon is a front-end web developer at Shopify who is passionate about using technology to solve real-world challenges—particularly the challenge of launching successful e-commerce websites. To be successful, these sites need to be _fast_. + +Watch Trevor’s presentation to learn how to harness Shopify and Gatsby together to produce blazing-fast e-commerce sites. Trevor provides a detailed, step-by-step walkthrough for setting up a sample site, showing how to set up a Shopify store, pull data into Gatsby, use that data to create pages, and implement a Shopify plugin to manage checkout. + +[![Gatsby Days LA Video 6: Set Up and Sell Things Fast with Gatsby and Shopify - Trevor Harmon - Gatsby Days LA 2020](https://res.cloudinary.com/marcomontalbano/image/upload/v1586210368/video_to_markdown/images/youtube--tUtuGAFOjYI-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=tUtuGAFOjYI "Gatsby Days LA Video 6: Set Up and Sell Things Fast with Gatsby and Shopify - Trevor Harmon - Gatsby Days LA 2020") diff --git a/docs/blog/2020-04-09-LA-2020-mcmahon/index.md b/docs/blog/2020-04-09-LA-2020-mcmahon/index.md new file mode 100644 index 0000000000000..c89401247eb34 --- /dev/null +++ b/docs/blog/2020-04-09-LA-2020-mcmahon/index.md @@ -0,0 +1,20 @@ +--- +title: "Gatsby Days LA 2020 Video 7: Improving React Component Library Documentation with Gatsby and MDX" +date: 2020-04-09 +author: Greg Thomas +excerpt: "Kathleen McMahon, a software engineer at O’Reilly Media, tells the Gatsby Days LA 2020 audience how her team facilitated contributions and improved accessibility for her company’s component library documentation using Gatsby and MDX." +tags: + - gatsby-days + - accessibility + - react + - plug-ins + - mdx +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Kathleen McMahon is a senior front-end software engineer and the tech lead for O’Reilly Media’s Design System team. Recently, her team decided it was time to rework the design system’s component library documentation. The primary goals: encourage more contributions by making the documentation process more user friendly and improve accessibility so more users could take advantage of the resulting documentation. + +Her team’s solution was to incorporate Gatsby with MDX into the documentation tooling. Learn how Kathleen’s team implemented these technologies and discover ways to customize your own implementation using several Gatsby plugins. Hear how the Design System team at O’Reilly Media has been able to reduce barriers for contributions, minimize documentation maintenance burdens, and make the documentation area more inclusive. 
+ +[![Gatsby Days LA Video 7: React component library documentation with Gatsby and MDX - Kathleen McMahon](https://res.cloudinary.com/marcomontalbano/image/upload/v1586291370/video_to_markdown/images/youtube--ZgHkX7S_po8-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=ZgHkX7S_po8 "Gatsby Days LA Video 7: React component library documentation with Gatsby and MDX - Kathleen McMahon") diff --git a/docs/blog/2020-04-10-LA-2020-Libere/index.md b/docs/blog/2020-04-10-LA-2020-Libere/index.md new file mode 100644 index 0000000000000..7ad09b76ff216 --- /dev/null +++ b/docs/blog/2020-04-10-LA-2020-Libere/index.md @@ -0,0 +1,19 @@ +--- +title: "Gatsby Days LA 2020 Video 8: Building Excitement for Gatsby in East Africa" +date: 2020-04-10 +author: Greg Thomas +excerpt: "East African web developer Gatare Libère explains how he first discovered Gatsby and describes the enthusiastic reception he has been experiencing whenever he discusses Gatsby at regional events." +tags: + - gatsby-days + - accessibility + - community + - hacktoberfest +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +For East African developer Gatare Libère, the journey to Gatsby began with a search for new swag. Gatare is a software engineer and co-founder of Sewilio, a company based in Burundi that offers a full range of mobile, web, cloud, and security services. Right after Hacktoberfest, Gatare went looking for new t-shirts and earned some Gatsby swag by becoming a community contributor. A podcast about Gatsby on Syntax.fm then spurred him to explore Gatsby further in a small project. + +Before long, Gatare was not only using Gatsby but also speaking about Gatsby at events across East Africa. At each stop, he has met with developers who are eager to learn how to build blazing fast websites and web applications with Gatsby. Watch this video from Gatare to hear more about what has drawn him to the framework, what motivates other developers in his part of the world, and how to best connect with a fast-growing community of web developers in East Africa. + +[![Gatsby Days LA Video 8: Gatsby in East Africa - Gatare Libère - Gatsby Days LA 2020](https://res.cloudinary.com/marcomontalbano/image/upload/v1586291772/video_to_markdown/images/youtube--bCGpYceSZNM-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=bCGpYceSZNM "Gatsby Days LA Video 8: Gatsby in East Africa - Gatare Libère - Gatsby Days LA 2020") diff --git a/docs/blog/2020-04-13-LA-2020-Comeau/index.md b/docs/blog/2020-04-13-LA-2020-Comeau/index.md new file mode 100644 index 0000000000000..aff7f51e367dc --- /dev/null +++ b/docs/blog/2020-04-13-LA-2020-Comeau/index.md @@ -0,0 +1,22 @@ +--- +title: "Gatsby Days LA 2020 Video 9: Empowering Content Creators with a Headless CMS and Gatsby" +date: 2020-04-13 +author: Greg Thomas +excerpt: "Is it possible to satisfy web users, developers, and content creators all at once? 
Gatsby’s own Josh Comeau explained how Gatsby can achieve exactly that at Gatsby Days LA 2020." +tags: + - gatsby-days + - content-mesh + - gatsby-cloud + - gatsby-preview + - headless-cms +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Developers are adept at coding engaging, high-performance websites and web applications. Unfortunately, dev-driven sites are often difficult for content creators to update and manage. + +On the flip side, content management systems like WordPress simplify life for content creators but blunt the ability for developers to use the advanced tools they know and love. Gatsby software engineer Josh Comeau asks whether there is a way to deliver great end-user experiences while satisfying both developers and content creators. _(Spoiler alert: The answer is Gatsby)_. + +Learn how using Gatsby in conjunction with a headless CMS can help you build user-friendly and developer-friendly websites while giving content creators a front end they can navigate without technical expertise. Josh highlights the value of using a content mesh approach in assembling best-of-breed capabilities. And he demonstrates the benefits of Gatsby Preview for content creators to view their changes in real time as they add text, insert images, and even modify site-wide navigation. + +[![Gatsby Days LA Video 9: Empowered Workflows with Gatsby.js - Josh Comeau - Gatsby Days LA 2020](https://res.cloudinary.com/marcomontalbano/image/upload/v1586292534/video_to_markdown/images/youtube--_FiUAqf716k-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=_FiUAqf716k "Gatsby Days LA Video 9: Empowered Workflows with Gatsby.js - Josh Comeau - Gatsby Days LA 2020") diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/index.md b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/index.md new file mode 100644 index 0000000000000..faeadf524cef6 --- /dev/null +++ b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/index.md @@ -0,0 +1,286 @@ +--- +title: "Rebuilding the Agility CMS Website with Gatsby (one page at a time)" +date: 2020-04-13 +author: Joel Varty +excerpt: "When the Agility CMS team asked Joel Varty, the company's president, for a home page upgrade that could handle not only content but new integrations with mar-tech apps, he jumped at the chance to build it on Gatsby using JAMstack architecture." +tags: + - netlify + - headless-cms + - jamstack +--- + +I've been preaching about JAMStack for a while now, and lately I've been talking a lot about how you can [move your website to JAMStack without rebuilding everything](https://agilitycms.com/resources/posts/migrate-to-jamstack-now-no-excuses). + +I decided it was time to take my own advice and upgrade my company's website, [agilitycms.com](https://agilitycms.com), starting with the home page, and adding pages and sections over time. 
Agility CMS is a headless content management system running in the cloud (Microsoft Azure). The current website is built on ASP.NET Core. Our marketing team came to me with a request to build a brand new home page that included not only updated content but also a brand new design, new modules, and new integrations with mar-tech. + +This was just the opportunity I’d been looking for: A chance to practice what I've been preaching! What's also great is that the current .NET website is already built using a headless CMS, so I don't have to rewrite or migrate any content. + +## Goals + +- Build the new home page using [Gatsby](https://www.gatsbyjs.org/) +- Re-use much of the existing site content from [our headless CMS](https://agilitycms.com/) +- Zero downtime + +## tl;dr + +For those of you who just want to see the code for the new site (it only has code for the modules that are on the homepage right now, but it will expand over time), it's all here on GitHub: https://github.com/agility/agility-website-gatsby. + +## Steps + +Here's what I did to get everything up and running, right from coding the new site to deploying, testing, and flipping over the DNS. + +1. Get it running locally with Gatsby +2. Implement the Header and Footer +3. Create a new Home Page +4. Run it in Gatsby Cloud +5. Deploy to Netlify +6. Set up the CDN to do the Edge Routing + +What's really cool is that this workflow isn't just for upgrading Agility websites to JAMstack - you can use it for any website! Now let’s break each step into specific details. + +## Step 1: Get it running locally with Gatsby + +It's really easy to get started creating a Gatsby website with Agility CMS. Just clone the [starter repo from GitHub](https://github.com/agility/agility-gatsby-starter), open up the folder in [VS Code](https://code.visualstudio.com/), and pop in your API keys. + +```shell +git clone https://github.com/agility/agility-gatsby-starter.git +``` + +Now, find your API keys on the Getting Started page of the [Agility CMS Content Manager](https://manager.agilitycms.com/). + +![Agility CMS Getting Started landing page](post-image-1.png "Agility CMS Screenshot") + +Put your keys into the **.env.development** and **.env.production** files. They look something like this and have instructions about which values go where. + +```text +# Your Instance Id +AGILITY_GUID= + +# Your Preview API Key (recommended) - you can get this from the Getting Started Page in Agility CMS +AGILITY_API_KEY= + +# If using your Preview API Key, set this to true +AGILITY_API_ISPREVIEW=true + +# If you want to enable /__refresh endpoint +ENABLE_GATSBY_REFRESH_ENDPOINT=true +``` + +Now, check out the **gatsby-config.js** file - it has a section for plugins, and the Agility CMS source plugin is called **@agility/gatsby-source-agilitycms**. Check that the language code and channel name match what you have in your Agility CMS instance. + +### Modules and Page Templates + +Since this was an existing website, we already had a few Page Templates and Module Definitions set up in the instance. We need to make sure we at least have placeholders in our new Gatsby project for those, and we'll just implement whatever is needed for our new home page. + +![Project Folder Structure](post-image-2.png "Project Folder Structure") + +There are folders for Page Templates and Modules, and you can just put in placeholder React code for these right now.
+ +Here's an example Page Template component with a single content zone called "Main": + +```jsx +import React from "react" +import ContentZone from "../agility/components/ContentZone" + +const MainTemplate = props => { + return ( +
+    <ContentZone name="Main" {...props} />
+  )
+}
+export default MainTemplate
+```
+
+Here's an example Module component that doesn't do anything except output its name.
+
+```jsx
+import React from "react"
+
+const LatestResources = ({ item }) => {
+  return <div>LatestResources</div>
+} + +export default LatestResources +``` + +When I got all those things in place, I started up Gatsby to see what would happen. + +```shell +gatsby develop +``` + +Gatsby will pull down all the content for our website and put it into GraphQL. This is a _content sync_, so from now on it will only pull down a delta (what's changed) from Agility CMS. +![Alt Text](post-image-3.png "Agility CMS - Gatsby - Terminal Output") + +## Step 2: Implement the Header and Footer + +We need to make our new website look just like the old one, so we need to match the colors, fonts, and other visual styles as much as we can. You may want to pull in the CSS from your old site—or start from scratch if you want to make a clean break. + +Either way, this is a great time to familiarize yourself with the GraphQL data in your website. Point your browser to [http://localhost:8000/\_\_\_graphql](http://localhost:8000/___graphql) to start exploring your data, and you can build the query access and Shared Content or Sitemap data. You can see that all content is available, grouped by content definition name. + +Here's the query that I used to grab a Global Header shared content item, as well as the nested sitemap as JSON. + +```graphql +query GlobalHeaderQuery { + agilityGlobalHeader(properties: { referenceName: { eq: "globalheader" } }) { + customFields { + marketingBanner + logo { + url + label + } + } + preHeaderLinks { + customFields { + title + uRL { + href + target + text + } + } + } + } + agilitynestedsitemap { + internal { + content + } + } +} +``` + +Your query will look different, of course, but I hope you get the idea of how to query your Agility CMS content. + +Now, you can create a component that uses a `` to pull in the data and make it available. Check out the example `GlobalHeader.js` component in your project for an example of that. + +## Step 3: Create a new Home Page + +In Agility CMS, the first page in your sitemap is considered your Home Page. So, I created a new home page and temporarily called it home-2. I didn't publish it, but this meant that I could use this to build out the modules on the new home page. + +![Agility CMS Screenshot - temporary home page](post-image-4.png "Agility CMS Screenshot - Home Page") + +I created a couple of new Module Definitions that I needed for the new page design, so I created new react components in the **modules** folder for those. The amazing thing about the Agility CMS Gatsby implementation is that nearly all the data that you need to render a module on a page is given to you in a property called **item**. + +What I normally do is just `console.log("ModuleName", item)` so I can see exactly what that data looks like. Then run the site locally in your browser http://localhost:8000 and open up your Developer Tools to see what it looks like in the console. + +### Hot Reloading - Code and Content + +One of the best things about React development with Gatsby is that everything can be hot reloaded, including the content! + +If you leave your browser open beside your code, you can just make changes and see them. Additionally, if you open a second terminal window, you can also pull down any changes that you make to the content in Agility CMS without having to run `gatsby develop` again. + +```shell +curl -X POST http://localhost:8000/__refresh +``` + +Here's a side-by-side screenshot of my 2 monitor setup. You can see that I have 2 terminal windows opened in VS Code. 
+ +![two screens side by side showing hot reloading website and the Gatsby code for it](post-image-5.png "Side-by-side Hot Module Reload") + +I really love this workflow! It makes it really easy to tweak things and see the changes instantly. + +## Step 4: Run it in Gatsby Cloud + +To get going, [Gatsby Cloud](https://www.gatsbyjs.com/) is the easiest way to Preview and Build Gatsby sites. The free version is enough to get you started. + +Push your code to a GitHub repo, sign up for Gatsby Cloud, and create a new site. When asked, simply choose "I already have a Gatsby site" and don't add any integrations just now. + +![landing page for Gatsby Cloud Create New Site](post-image-6.png "Gatsby Cloud - Create New Site") + +You can securely add your API Keys in the Environment Variable section of Settings. + +![Gatsby webpage for setting environment variables](post-image-7.png "Gatsby Cloud - Environment Variables") + +Now you can take the Preview link from Gatsby and plug that into Agility CMS in the Domain Configuration area of the Settings section. + +Additionally, Gatsby gives you webhook URLs for Preview and Build. You can go ahead and plug these into the Webhook area in Agility Settings. + +## Step 5: Deploy to Netlify + +Netlify is a really great service to easily host static websites. Even better, it integrates seamlessly so that Gatsby can automatically deploy your website to Netlify when it builds! + +Go ahead and create a free Netlify account and point to it under the Gatsby **Hosting Integrations** settings section. + +Since Gatsby is going to be building the LIVE version of our site, we need to publish our new Homepage in Agility. If you've reviewed everything in Preview and you're ready to go, the first thing you need to do is to disable the Syncing Web Servers for the existing website in Agility CMS. You'll have to coordinate this with your content team, of course. + +When I was testing all this out, I actually built my new site using the Preview API Keys temporarily. That way I could verify everything was working first. + +In the end, you're going to end up with a URL to your new home page in Netlify. + +## Step 6: Setup the CDN to do the Edge Routing + +We can use Edge computing to decide whether to route to the new website or the old one, depending on the page. + +In this example, I decided to use a [Stackpath](https://www.stackpath.com/) Script to do this for us. + +You set up a Stackpath site just like normal, but pointing to your OLD website's unique hostname. It can't be your public DNS name - you need to have another unique way to address that site. For example, since our website is hosted in an Azure App Service, we get an azurewebsites.net URL. + +Now you create a Script in Stackpath to do the routing. In our case, we ONLY want to route requests to the home page, plus any Gatsby-specific stuff, to our new website. + +You can also see that I'm only allowing for 60 seconds on caching in the CDN for all requests. This is because we don't have anything built into this workflow to clear the cache in this CDN, and I don't want my content team to have to wait too long to see their changes. I'll take care of that later. 
+ +```javascript +// sample script +addEventListener("fetch", event => { + event.respondWith(handleRequest(event.request)) +}) + +/** + * Fetch and return the request body + * @param {Request} request + */ +async function handleRequest(request) { + // Wrap your script in a try/catch and return the error stack to view error information + try { + /* The request can be modified here before sending it with fetch */ + + const originalUrl = request.url + const url = new URL(request.url) + // we need get the url in order to figure out where to route them + let path = url.pathname + + //secondary domain... + const secDomain = "https://my-new-website.netlify.com" + + if ( + path == "/" || //redirect the home page... + path.indexOf("/webpack") != -1 || + path.indexOf("/common") != -1 || + path.indexOf("/component") != -1 || + path.indexOf("/page-data") != -1 || + path.indexOf("/styles") != -1 || + path.indexOf("/app-") != -1 + ) { + // we need get the url in order to figure out where to route them + request.url = secDomain + path + } + + const response = await fetch(request) + + response.headers.set("Cache-Control", "public, max-age=60") + + return response + } catch (e) { + return new Response(e.stack || e, { status: 500 }) + } +} +``` + +You can now test this whole thing with the unique Stackpath URL that you get (123xyz.stackpathcdn.com). + +Once you are happy with everything, you simply switch your DNS to point to Stackpath. + +That's it—you’re finished! + +If you have any questions about JAMstack or migrating to this technology, reach out! + +## Next Steps + +I encourage you to go ahead and use this technique as the starting point for one of the pages on your own website! You can use [Agility CMS for free](https://agilitycms.com/v3-free-signup-developers?source=devto) to do it. + +## BONUS CONTENT! + +As a companion to this article, I recorded a video that walks you through the steps I took and the different tools involved. I also highlight some of the really neat features of Agility CMS, Gatsby, Netlify, and Stackpath. 
+ +[![Migrating a website to JAMstack with Gatsby](https://res.cloudinary.com/marcomontalbano/image/upload/v1586464859/video_to_markdown/images/youtube--WSIzYKDgJuE-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/embed/WSIzYKDgJuE "Migrating a website to JAMstack with Gatsby") diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-1.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-1.png new file mode 100644 index 0000000000000..d2fb5577091c0 Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-1.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-2.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-2.png new file mode 100644 index 0000000000000..3e39d4759e004 Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-2.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-3.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-3.png new file mode 100644 index 0000000000000..56acad47f7885 Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-3.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-4.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-4.png new file mode 100644 index 0000000000000..8591c472e3d6a Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-4.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-5.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-5.png new file mode 100644 index 0000000000000..f774a63b65e2d Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-5.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-6.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-6.png new file mode 100644 index 0000000000000..db693894bb03a Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-6.png differ diff --git a/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-7.png b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-7.png new file mode 100644 index 0000000000000..029071e04469c Binary files /dev/null and b/docs/blog/2020-04-13-upgrading-to-jamstack-with-agility/post-image-7.png differ diff --git a/docs/blog/2020-04-14-LA-2020-Kim/index.md b/docs/blog/2020-04-14-LA-2020-Kim/index.md new file mode 100644 index 0000000000000..1717d54988795 --- /dev/null +++ b/docs/blog/2020-04-14-LA-2020-Kim/index.md @@ -0,0 +1,23 @@ +--- +title: 'Gatsby Days LA 2020 Video 10: Teaching Web Development to Beginners with Gatsby' +date: 2020-04-14 +author: Greg Thomas +excerpt: "UC Davis grad student Daniel Kim explains why Gatsby offers +a better framework than React for teaching beginning web development +at Gatsby Days LA 2020." +tags: +- gatsby-days +- community +- themes +- contentful +- markdown +- learning-to-code +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! 
(Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Daniel Kim is a master’s degree student at the University of California, Davis, and the founder of [Bit Project](https://www.bitproject.org/)—a student organization that strives to make technical education accessible to more people. With over 70 developer members, Bit Project has reached more than 3,100 students. The group is working to expand its community through technical workshops, coding bootcamps, and outreach programs. + +Daniel and his team recently decided to launch a new five-week course to teach beginning web development. They considered using React as the basis for the course but found that the extreme customization available with React made it tough for teaching. Instead, they chose Gatsby. Watch Daniel’s presentation from Gatsby Days LA 2020 and learn why hot reloading, routing, themes, and other capabilities made Gatsby the right choice. Then hear how Daniel used Gatsby in each week of the course to teach key web development concepts. + +[![Teaching Web Development to Beginners with Gatsby.js - Daniel Kim - Gatsby Days LA 2020](https://res.cloudinary.com/marcomontalbano/image/upload/v1586358897/video_to_markdown/images/youtube--XQ1hGhIk1IA-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=XQ1hGhIk1IA "Teaching Web Development to Beginners with Gatsby.js - Daniel Kim - Gatsby Days LA 2020") diff --git a/docs/blog/2020-04-14-virtual-gatsby-days-registration/index.md b/docs/blog/2020-04-14-virtual-gatsby-days-registration/index.md new file mode 100644 index 0000000000000..25e1d7dd5bc88 --- /dev/null +++ b/docs/blog/2020-04-14-virtual-gatsby-days-registration/index.md @@ -0,0 +1,22 @@ +--- +title: "Register Now for Virtual Gatsby Days" +date: 2020-04-14 +author: Laci Texter +excerpt: "Our first virtual Gatsby Days is coming June 2-3, 2020. This free remote conference will be live-streamed on YouTube from 9am to noon PST both days, and you can register now!" +tags: + - gatsby-days + - community + - gatsby-cloud +--- + +Back before the world changed so suddenly, our quarterly Gatsby Days was an in-person gathering: A way to assemble the Gatsby community, guest presenters, and Gatsby team for a day long conference and celebration-slash-deep-dive into all things Gatsby. Obviously we can't do things the same way right now, but that's not going to stop us! + +Join us for Virtual Gatsby Days, June 2nd and 3rd: Registration is now open at https://www.gatsbyjs.com/virtual-gatsby-days. + +![Gatsby logo transposed on binary background receding into distance](./virutalGatsbyDays.jpg "Gatsby logo on futuristic binary background") + +Virtual Gatsby Days will continue to focus on the future of modern website development, use cases, and deep dive into topics such as what’s new in the web technology stack. You’ll have the opportunity to participate in a Q&A with Gatsby creators, Kyle Mathews and Sam Bhagwat, hear other Gatsby core team members speak, and connect with the Gatsby community. + +Register for Virtual Gatsby Days, mark your calendars for June 2nd - 3rd and follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with speaker announcements and other developments. + +In case you missed it, we’re still accepting speaker submissions! 
[Fill out the CFP](https://docs.google.com/forms/d/e/1FAIpQLSfjUpqpmRL18ydo_PmC4jxvPG8xhOlix43KeRHOhUbPp3u7Mw/viewform?usp=sf_link) by April 24th and let us know what you’re thinking about. We’re reviewing submissions as they come in so the earlier you submit, the better. diff --git a/docs/blog/2020-04-14-virtual-gatsby-days-registration/virutalGatsbyDays.jpg b/docs/blog/2020-04-14-virtual-gatsby-days-registration/virutalGatsbyDays.jpg new file mode 100644 index 0000000000000..41447c26fad69 Binary files /dev/null and b/docs/blog/2020-04-14-virtual-gatsby-days-registration/virutalGatsbyDays.jpg differ diff --git a/docs/blog/2020-04-15-LA-2020-estevez/index.md b/docs/blog/2020-04-15-LA-2020-estevez/index.md new file mode 100644 index 0000000000000..92c9230f10388 --- /dev/null +++ b/docs/blog/2020-04-15-LA-2020-estevez/index.md @@ -0,0 +1,20 @@ +--- +title: "Gatsby Days LA 2020 Video 11: Building Accessible Components (Without First Reading Docs for Days)" +date: 2020-04-15 +author: Greg Thomas +excerpt: "New York Times Senior Software Engineer Yuraima Estevez shows how developers can improve the accessibility of websites in three “easy” steps that do not involve days of documentation reading." +tags: + - gatsby-days + - community + - accessibility + - documentation + - diversity-and-inclusion +--- + +_Welcome to the Gatsby Days 2020 Video Blog: Los Angeles Edition. In this series of eleven videos, you can catch up with all the wit and wisdom shared at the presentations from our February community gathering in LA. If you weren’t able to make it in person, these videos are the next best thing to owning a time machine! (Though owning a time machine would be super cool for sure, joining us at our next Gatsby Days—currently scheduled as a virtual event June 2nd-3rd—would be pretty awesome, too 💜. Follow [Gatsby on Twitter](https://twitter.com/gatsbyjs) to keep up with when registration starts, speaker announcements and other developments)._ + +Yuraima Estevez is a senior software engineer tech lead at the New York Times who is passionate about building open source tools and enabling empathetic web development. At Gatsby Days LA 2020, Yuraima focused on accessibility. To realize the power of the web’s universality, developers must build sites that are accessible to people with disabilities. But doing so can be challenging, especially when there is a ton of documentation to sort through. + +Yuraima believes that building accessible components can help streamline progress toward delivering accessible sites. Learn how you can increase accessibility and improve support for assistive technologies as you are building components through three “easy” steps: using semantic HTML whenever possible, employing ARIA (Accessible Rich Internet Applications) attributes, and integrating keyboard navigation capabilities. Adopting this three-step approach also makes much more efficient use of available documentation. 
+ +[![TL;DR for Accessible Components - Yuraima Estevez - Gatsby Days LA 2020](https://res.cloudinary.com/marcomontalbano/image/upload/v1586365251/video_to_markdown/images/youtube--Qu3HuUKLNh8-c05b58ac6eb4c4700831b2b3070cd403.jpg)](https://www.youtube.com/watch?v=Qu3HuUKLNh8&t=1s "TL;DR for Accessible Components - Yuraima Estevez - Gatsby Days LA 2020") diff --git a/docs/blog/author.yaml b/docs/blog/author.yaml index 0ab3a35b3a8ff..a65839e41bcd8 100644 --- a/docs/blog/author.yaml +++ b/docs/blog/author.yaml @@ -403,6 +403,14 @@ - id: Greg Thomas bio: "Writer, musician, historian, and dedicated service provider for the dog." avatar: avatars/greg-thomas.jpg +- id: Laci Texter + bio: "Runner of rivers and city blocks, car-a-oke queen, and very fond of snail mail. Enjoys the art/science/sport of communication. Senior Brand Marketing Manager for Gatsby." + avatar: avatars/laci-texter.jpg + twitter: "@textercomm" - id: Debra Combs bio: "Product Manager, Writer, Gamer, and lover of laughter." avatar: avatars/debra-combs.png +- id: Joel Varty + bio: "Dad to teens, HS football coach, president of Agility CMS." + avatar: avatars/joel-varty.jpg + twitter: "@joelvarty" diff --git a/docs/blog/avatars/joel-varty.jpg b/docs/blog/avatars/joel-varty.jpg new file mode 100644 index 0000000000000..d140c70568672 Binary files /dev/null and b/docs/blog/avatars/joel-varty.jpg differ diff --git a/docs/blog/avatars/laci-texter.jpg b/docs/blog/avatars/laci-texter.jpg new file mode 100644 index 0000000000000..f0752937e7965 Binary files /dev/null and b/docs/blog/avatars/laci-texter.jpg differ diff --git a/docs/contributing/events.md b/docs/contributing/events.md index ddc52a72dc23c..00ebcbc46ceda 100644 --- a/docs/contributing/events.md +++ b/docs/contributing/events.md @@ -3,9 +3,9 @@ title: Gatsby Community Events description: Learn about other events happening around the globe to connect with other members of the Gatsby community --- -Interested in connecting with the Gatsby community in person? Take a look at the list below to see community-organized Gatsby events. +**IMPORTANT NOTE ON COMMUNITY EVENTS: Promotion and support of Gatsby community events is currently suspended due to COVID-19. Stay tuned for updates on when our community events program will resume.** -Want to see your event featured here? [Submit your Gatsby event info!](https://airtable.com/shrpwc99yogJm9sfI) +Interested in connecting with the Gatsby community in person? Take a look at the list below to see past community-organized Gatsby events. diff --git a/docs/contributing/how-to-file-an-issue.md b/docs/contributing/how-to-file-an-issue.md index bf9752f8a3e07..bd97e538f618c 100644 --- a/docs/contributing/how-to-file-an-issue.md +++ b/docs/contributing/how-to-file-an-issue.md @@ -2,14 +2,22 @@ title: How to File an Issue --- -The [issue tracker](https://github.com/gatsbyjs/gatsby/issues) is the preferred channel for bug reports, feature requests and [submitting pull requests](/contributing/how-to-open-a-pull-request/). +The Gatsby GitHub [issue tracker](https://github.com/gatsbyjs/gatsby/issues) is the preferred channel for bug reports, documentation, feature requests and [submitting pull requests](/contributing/how-to-open-a-pull-request/). 
-If you want your issue to be resolved quickly, please include in your issue: +To resolve your issue, please select the appropriate category: + +- Bug Reports +- Documentation +- Feature Requests +- New [Translation](/contributing/translation#creating-a-new-translation) Requests + +For bug reports, include in your issue: - Gatsby version, Node.js version, OS version - The contents of your `gatsby-config.js` and `package.json` as well as your `gatsby-node.js`, `gatsby-browser.js` `gatsby-ssr.js` files depending on changes you've made there. +- A [reproduction](/contributing/how-to-make-a-reproducible-test-case/) for debugging and taking action Please do not use the issue tracker for personal support requests. [Stack Overflow](https://stackoverflow.com/questions/ask?tags=gatsby) (**gatsby** tag) and the [Gatsby Discord](https://gatsby.dev/discord) are better places to get help. @@ -18,7 +26,7 @@ Please do not use the issue tracker for personal support requests. [Stack Overfl If an issue is affecting you, start at the top of this list and complete as many tasks on the list as you can: 1. If there is an issue, add a reaction or more details to the issue to indicate that it's affecting you -2. If there is an issue and you can add more detail, write a comment describing how the bug is affecting you, OR if you can, write up a work-around for the bug -3. If there _is not_ an issue, write the most complete description of what's happening, preferably with a link to a Gatsby site that reproduces the problem or [create a reproducible test case](/contributing/how-to-make-a-reproducible-test-case/) -4. Offer to help fix the bug (and it is totally expected that you ask for help; open-source maintainers want to help contributors) +2. If there is an issue and you can add more detail, write a comment describing how the problem is affecting you, OR if you can, write up a work-around or improvement for the issue +3. If there _is not_ an issue, write the most complete description of what's happening, preferably with a link to a Gatsby site that reproduces the problem or [create a reproducible test case](/contributing/how-to-make-a-reproducible-test-case/) where applicable +4. Offer to help fix the issue (and it is totally expected that you ask for help; open-source maintainers want to help contributors) 5. [Deliver a well-crafted, tested PR](/contributing/how-to-open-a-pull-request/) diff --git a/docs/contributing/organize-a-gatsby-event.md b/docs/contributing/organize-a-gatsby-event.md index 18d6bb34a754d..849f66ffc22f5 100644 --- a/docs/contributing/organize-a-gatsby-event.md +++ b/docs/contributing/organize-a-gatsby-event.md @@ -2,12 +2,4 @@ title: Organize a Gatsby Event --- -Our communiy event support offerings and process are undergoing some changes. Check back soon to see our updated community event support opportunities! - -## What constitutes a Gatsby event? - -A community-organized Gatsby event can be a local meetup, a small conference, a “lunch and learn” with coworkers, or a larger event - as long as **it includes at least one Gatsby-focused presentation or discussion**. It’s up to you how many people you want to invite and how casual the environment. You can organize an event at your workplace or for the local community. - -## Related Links - -- [Gatsby's Community Events](/contributing/events/) +**IMPORTANT NOTE ON COMMUNITY EVENTS: Promotion and support of Gatsby community events is currently suspended due to COVID-19. 
Stay tuned for updates on when our community events program will resume.** diff --git a/docs/contributing/setting-up-your-local-dev-environment.md b/docs/contributing/setting-up-your-local-dev-environment.md index 16ad6809ad456..3186a5a1d3e2f 100644 --- a/docs/contributing/setting-up-your-local-dev-environment.md +++ b/docs/contributing/setting-up-your-local-dev-environment.md @@ -57,6 +57,12 @@ Yarn is a package manager for your code, similar to [NPM](https://www.npmjs.com/ - Note: if you plan to modify packages that are exported from `gatsby` directly, you need to either add those manually to your test sites so that they are listed in `package.json` (e.g. `yarn add gatsby-link`), or specify them explicitly with `gatsby-dev --packages gatsby-link`). +- If you've recently run `gatsby-dev` your `node_modules` will be out of sync with current published packages. In order to undo this, you can remove the `node_modules` directory or run: + +```shell +git checkout package.json; yarn --force +``` + ### Add tests - Add tests and code for your changes. diff --git a/docs/docs/add-custom-webpack-config.md b/docs/docs/add-custom-webpack-config.md index 60a40ee01668d..00c75eebc2a47 100644 --- a/docs/docs/add-custom-webpack-config.md +++ b/docs/docs/add-custom-webpack-config.md @@ -75,6 +75,19 @@ exports.onCreateWebpackConfig = ({ stage, actions }) => { You can always find more information on _resolve_ and other options in the official [Webpack docs](https://webpack.js.org/concepts/). +### Importing non-webpack tools using `yarn` + +Note that using absolute imports only applies to webpack resolutions and will not work for other tools, e.g. eslint or typescript. +But if you are using yarn, then the best practice is to set up your imports in package.json as shown below: + +```js +{ + "dependencies": { + "hooks": "link:./src/hooks", + } +} +``` + ### Modifying the Babel loader You need this if you want to do things like transpile parts of `node_modules`. diff --git a/docs/docs/add-seo-component.md b/docs/docs/add-seo-component.md index 8d471a2b20bd5..f955ce8686d61 100644 --- a/docs/docs/add-seo-component.md +++ b/docs/docs/add-seo-component.md @@ -4,11 +4,11 @@ title: "Adding an SEO Component" Every site on the web has basic _meta-tags_ like the title, favicon or description of the page in their `` element. This information gets displayed in the browser and is used when someone shares your website, e.g. on Twitter. You can give your users and these websites additional data to embed your website with more data — and that's where this guide for a SEO component comes in. At the end you'll have a component you can place in your layout file and have rich previews for other clients, smartphone users, and search engines. -_Note: This component will use StaticQuery. If you're unfamiliar with that, have a look at the [StaticQuery documentation](/docs/static-query/). You also have to have `react-helmet` installed for which you can have a look at [this document](/docs/add-page-metadata)._ +_Note: This component will use `useStaticQuery`. If you're unfamiliar with that, have a look at the [useStaticQuery documentation](/docs/use-static-query/). You also have to have `react-helmet` installed for which you can have a look at [this document](/docs/add-page-metadata)._ ## gatsby-config.js -Gatsby makes all data put into the `siteMetadata` section of your `gatsby-config` file automatically available in GraphQL and therefore it's a good idea to place your information for the component there. 
+Gatsby automatically exposes the `siteMetadata` section of the `gatsby-config` file in the GraphQL datalayer. It's considered best practice to place your site meta information there. ```js:title=gatsby-config.js module.exports = { @@ -26,15 +26,16 @@ module.exports = { ## SEO component -Create a new component with this initial boilerplate: +First create a new component with this initial boilerplate. ```jsx:title=src/components/SEO.js import React from "react" -import { Helmet } from "react-helmet" import PropTypes from "prop-types" -import { StaticQuery, graphql } from "gatsby" +import { Helmet } from "react-helmet" +import { useLocation } from "@reach/router" +import { useStaticQuery, graphql } from "gatsby" -const SEO = ({ title, description, image, pathname, article }) => () +const SEO = ({ title, description, image, article }) => () export default SEO @@ -42,7 +43,6 @@ SEO.propTypes = { title: PropTypes.string, description: PropTypes.string, image: PropTypes.string, - pathname: PropTypes.string, article: PropTypes.bool, } @@ -50,7 +50,6 @@ SEO.defaultProps = { title: null, description: null, image: null, - pathname: null, article: false, } ``` @@ -59,12 +58,15 @@ SEO.defaultProps = { As the SEO component should also be usable in other files, e.g. a template file, the component also accepts properties for which you set sensible defaults in the `SEO.defaultProps` section. This way the information you put into `siteMetadata` gets used every time unless you define the property explicitly. -Now define the query and place it in the StaticQuery (you can also save the query in a constant). You can also alias query items, so `title` gets renamed to `defaultTitle`. +Now define the query and pass it to `useStaticQuery`. You can also alias query items, so `title` gets renamed to `defaultTitle`. ```jsx:title=src/components/SEO.js -const SEO = ({ title, description, image, pathname, article }) => ( - -) +const SEO = ({ title, description, image, article }) => { + const { location } = useLocation() + const { site } = useStaticQuery(query) + + return null +} export default SEO @@ -84,98 +86,97 @@ const query = graphql` ` ``` -The next step is to destructure the data from the query and to create an object that checks if the props were used — if not the default values are utilized. The name aliasing comes in handy here: It avoids name collisions. +The next step is to destructure the data from the query and create an object that checks if the props were used. If not, the default values are applied. Aliasing the properties comes in handy here to avoid name collisions. ```jsx:title=src/components/SEO.js -const SEO = ({ title, description, image, pathname, article }) => ( - { - const seo = { - title: title || defaultTitle, - description: description || defaultDescription, - image: `${siteUrl}${image || defaultImage}`, - url: `${siteUrl}${pathname || '/'}`, - } +const SEO = ({ title, description, image, article }) => { + const { pathname } = useLocation() + const { site } = useStaticQuery(query) + + const { + defaultTitle, + titleTemplate, + defaultDescription, + siteUrl, + defaultImage, + twitterUsername, + } = site.siteMetadata + + const seo = { + title: title || defaultTitle, + description: description || defaultDescription, + image: `${siteUrl}${image || defaultImage}`, + url: `${siteUrl}${pathname}`, + } - return () - }} - /> -) + return null +} export default SEO ``` -The last step is to return this data with the help of `Helmet`. 
Your complete SEO component should look like: +The last step is to return this data with the help of `Helmet`. Your complete SEO component should look like this. ```jsx:title=src/components/SEO.js import React from "react" -import { Helmet } from "react-helmet" import PropTypes from "prop-types" -import { StaticQuery, graphql } from "gatsby" - -const SEO = ({ title, description, image, pathname, article }) => ( - { - const seo = { - title: title || defaultTitle, - description: description || defaultDescription, - image: `${siteUrl}${image || defaultImage}`, - url: `${siteUrl}${pathname || "/"}`, - } +import { Helmet } from "react-helmet" +import { useLocation } from "@reach/router" +import { useStaticQuery, graphql } from "gatsby" + +const SEO = ({ title, description, image, article }) => { + const { pathname } = useLocation() + const { site } = useStaticQuery(query) + + const { + defaultTitle, + titleTemplate, + defaultDescription, + siteUrl, + defaultImage, + twitterUsername, + } = site.siteMetadata + + const seo = { + title: title || defaultTitle, + description: description || defaultDescription, + image: `${siteUrl}${image || defaultImage}`, + url: `${siteUrl}${pathname}`, + } - return ( - <> - - - - {seo.url && } - {(article ? true : null) && ( - - )} - {seo.title && } - {seo.description && ( - - )} - {seo.image && } - - {twitterUsername && ( - - )} - {seo.title && } - {seo.description && ( - - )} - {seo.image && } - - - ) - }} - /> -) + return ( + + + + + {seo.url && } + + {(article ? true : null) && } + + {seo.title && } + + {seo.description && ( + + )} + + {seo.image && } + + + + {twitterUsername && ( + + )} + + {seo.title && } + + {seo.description && ( + + )} + + {seo.image && } + + ) +} export default SEO @@ -183,7 +184,6 @@ SEO.propTypes = { title: PropTypes.string, description: PropTypes.string, image: PropTypes.string, - pathname: PropTypes.string, article: PropTypes.bool, } @@ -191,7 +191,6 @@ SEO.defaultProps = { title: null, description: null, image: null, - pathname: null, article: false, } diff --git a/docs/docs/adding-a-shopping-cart-with-snipcart.md b/docs/docs/adding-a-shopping-cart-with-snipcart.md new file mode 100644 index 0000000000000..b2427fa4e75ef --- /dev/null +++ b/docs/docs/adding-a-shopping-cart-with-snipcart.md @@ -0,0 +1,194 @@ +--- +title: Adding a Shopping Cart with Snipcart +--- + +Snipcart is a shopping cart solution designed to drop neatly into any web project. Including it in your project allows any HTML you write to instantly become a "buy button" with the addition of several Snipcart-specific attributes. + +Combine it with a source of products (like a CMS or an e-commerce platform such as [Etsy](https://www.etsy.com/)) and a payment processor (like [Stripe](https://www.stripe.com/)) to build a complete e-commerce experience for your customers. + +## Prerequisites + +To get started, you'll need to have the following set up: + +- A Gatsby site with [`gatsby-plugin-snipcart`](/packages/gatsby-plugin-snipcart/) installed +- A [Snipcart](https://snipcart.com/) account +- A Snipcart test API key +- A list of products to sell + +Installing the plugin adds Snipcart's shopping cart JavaScript for you, so you can get right to building your e-commerce site. It's okay if you're not sure what you'd like to sell quite yet. Using sample products is fine to begin with! + +## Defining Products + +Adding products with Snipcart involves writing HTML representing your product and adding a set of attributes to that HTML. 
You might write something similar to the following code block for each item in your catalog. This code could be part of a page like `index.js` or anywhere else you list a product. + +```jsx +
+<div className="product">
+  <h2>Silver Stacking Ring</h2>
+  <p>$19.99</p>
+  <p>Wear one or seventeen! These rings are fun to mix and match.</p>
+</div>
+``` + +Including this information allows a visitor to see what you have for sale, but they can't do anything with that information quite yet. You'll need a way for customers to add individual items to their cart. Try adding a button with the following attributes. + +```jsx +
+<div className="product">
+  <h2>Silver Stacking Ring</h2>
+  <p>$19.99</p>
+  <p>Wear one or seventeen! These rings are fun to mix and match.</p>
+
+  {/* data-item-id can be any unique identifier for this product */}
+  <button
+    className="snipcart-add-item"
+    data-item-id="silver-stacking-ring"
+    data-item-price="19.99"
+    data-item-url="/"
+    data-item-name="Silver Stacking Ring"
+  >
+    Add to cart
+  </button>
+</div>
+``` + +Snipcart uses these attributes (`data-item-*`) to figure out what your customer is trying to buy and how much to charge them. The ID, price, URL, and name attributes are all required but there are several other attributes that you can add to enhance the shopping cart. + +Importantly, `data-item-url` denotes the URL of the webpage displaying the product(s). Snipcart needs to crawl this page to validate the order. The web crawler looks for the HTML element with the `snipcart-add-item` CSS class as well as the `data-item-id` and checks what it finds there against whatever is in the cart. + +> Note that, while you're testing, a `data-item-url` value of `"/"` is fine. For the checkout flow to work, you will eventually need to replace this with the actual URL at which you've published your catalog or product page. + +To learn more about defining products, see the [Snipcart documentation](https://docs.snipcart.com/v3/setup/products). + +### Adding product variants + +Snipcart refers to variations like size and color as "product options" or "custom fields". You can add these custom fields to your products to allow customers to refine their orders. + +Building on the stacking ring example, suppose that you wanted to give your customer a choice between available sizes. You would do this by adding a custom field with a name and options. + +```jsx +
+<div className="product">
+  <h2>Silver Stacking Ring</h2>
+  <p>$19.99</p>
+  <p>Wear one or seventeen! These rings are fun to mix and match.</p>
+
+  {/* the custom field lists example sizes as pipe-separated options */}
+  <button
+    className="snipcart-add-item"
+    data-item-id="silver-stacking-ring"
+    data-item-price="19.99"
+    data-item-url="/"
+    data-item-name="Silver Stacking Ring"
+    data-item-custom1-name="Size"
+    data-item-custom1-options="5|6|7|8"
+  >
+    Add to cart
+  </button>
+</div>
+``` + +You can add multiple custom fields by incrementing the index of the `data-item-custom` attribute. Perhaps you want customers to have the ability to mark each item in their cart as a gift. + +```jsx +
+<div className="product">
+  <h2>Silver Stacking Ring</h2>
+  <p>$19.99</p>
+  <p>Wear one or seventeen! These rings are fun to mix and match.</p>
+
+  {/* the second custom field lets the customer mark this item as a gift */}
+  <button
+    className="snipcart-add-item"
+    data-item-id="silver-stacking-ring"
+    data-item-price="19.99"
+    data-item-url="/"
+    data-item-name="Silver Stacking Ring"
+    data-item-custom1-name="Size"
+    data-item-custom1-options="5|6|7|8"
+    data-item-custom2-name="This is a gift"
+    data-item-custom2-options="true|false"
+  >
+    Add to cart
+  </button>
+</div>
+``` + +### Selling digital products + +Snipcart enables the sale of digital goods such as e-books, photography, and other artwork. To sell a file you intend for download, you'll need to upload it to your Snipcart dashboard and then add the resulting guid as the value of the `data-item-file-guid` attribute to your product's markup. You can specify a file access expiry in days and a maximum number of downloads per order from the dashboard. + +```jsx +
+<div className="product">
+  <h2>Silver Stacking Ring</h2>
+  <p>$19.99</p>
+  <p>Wear one or seventeen! These rings are fun to mix and match.</p>
+
+  {/* replace the placeholder below with the guid Snipcart generates for your uploaded file */}
+  <button
+    className="snipcart-add-item"
+    data-item-id="silver-stacking-ring"
+    data-item-price="19.99"
+    data-item-url="/"
+    data-item-name="Silver Stacking Ring"
+    data-item-file-guid="your-file-guid"
+  >
+    Add to cart
+  </button>
+</div>
+``` + +## Customizing the cart + +Using Snipcart allows you to retain nearly complete control over your customers' experience on your e-commerce site. You can configure and customize the cart behavior as well as the product options. Look through your [Snipcart account settings](https://app.snipcart.com/dashboard/account/settings) to change things like currency, shipping options, and email templates. + +### Preventing automatic popups + +By default, the shopping cart will pop up every time a customer adds a product. To prevent this behavior, set the value of `autopop` to `false` in your `gatsby-config.js` file. + +```js:title=gatsby-config.js +{ + resolve: 'gatsby-plugin-snipcart', + options: { + apiKey: 'your-api-key', + autopop: false // highlight-line + } +}, +``` + +If you choose to prevent this popup, you'll need to give your customers some other way to access their shopping carts. Create a "show cart" button by giving a `button` element a class of `snipcart-checkout`. + +```jsx + +``` + +### Styling the cart + +You can override most aspects of the shopping cart, including the CSS. Try inspecting the element you'd like to customize and using your browser's developer tools to find the correct Snipcart class to override. + +```css +.snip-header { + background: #663399; +} + +.snip-layout__main-container { + border: 2px solid black; + padding: 3px; +} +``` + +You can also customize the cart template itself. For a complete list of Snipcart's components (with code examples), check out their [default theme reference](https://docs.snipcart.com/v3/themes/default/reference). + +## Connecting a payment processor + +Once you're ready to receive payments, connect your chosen payment processor to your Snipcart account from the dashboard. You'll also need to input your credit card information in order to get your live Snipcart key. + +The following quote is from the Snipcart [payment gateway page](https://app.snipcart.com/dashboard/account/gateway): + +> Please note that you can select only one payment gateway. However, you can also enable Paypal Express Checkout on top of any gateway you choose. +> +> Also, you can switch from a gateway to another whenever you want. + +## Other resources + +- [Build an E-commerce Site with Gatsby, DatoCMS, and Snipcart](/tutorial/e-commerce-with-datocms-and-snipcart/) tutorial +- [`gatsby-plugin-snipcart`](/packages/gatsby-plugin-snipcart/) +- [OneShopper Gatsby starter](/starters/rohitguptab/OneShopper/) +- Reference guide on [sourcing from Etsy](/docs/sourcing-from-etsy/) +- Reference guide on [processing payments with Stripe](/docs/processing-payments-with-stripe/) +- From the Snipcart blog: [E-Commerce for React Developers [w/ Gatsby Tutorial]](https://snipcart.com/blog/react-ecommerce-gatsby-tutorial) +- [Snipcart documentation](https://docs.snipcart.com/v3/setup/installation) diff --git a/docs/docs/adding-page-transitions-with-plugin-transition-link.md b/docs/docs/adding-page-transitions-with-plugin-transition-link.md index bca376c4e36a1..71a7ccefe1dfc 100644 --- a/docs/docs/adding-page-transitions-with-plugin-transition-link.md +++ b/docs/docs/adding-page-transitions-with-plugin-transition-link.md @@ -159,7 +159,7 @@ module.exports = { ]; ``` -As always, check out [the installation docs](https://transitionlink.tylerbarnes.ca/docs/transitionportal/) for more information. +As always, check out [the installation docs](https://transitionlink.tylerbarnes.ca/docs/installation/) for more information. 
## Further reading diff --git a/docs/docs/audit-with-lighthouse.md b/docs/docs/audit-with-lighthouse.md index 3426d0c6dca96..55bb3595b813d 100644 --- a/docs/docs/audit-with-lighthouse.md +++ b/docs/docs/audit-with-lighthouse.md @@ -42,9 +42,17 @@ Now run your first Lighthouse test. ![Lighthouse audit results](./images/lighthouse-audit-results.png) -As you can see, Gatsby's performance is excellent out of the box but we're missing some things for PWA, Accessibility, Best Practices, and SEO that will improve your scores (and in the process make your site much more friendly to visitors and search engines). To improve your scores further, see the links under "Next steps" below. +## Using Gatsby Cloud -Next steps: +If you use [Gatsby Cloud](https://www.gatsbyjs.com/cloud), a Lighthouse audit runs automatically every time your site builds. In Cloud, every code commit to GitHub triggers a build, allowing you to see any shifts in your scores and what code changes caused them. + +![Lighthouse audit in Cloud](./images/cloud-lighthouse.png) + +## Next steps + +As you can see, Gatsby's performance is excellent out of the box but we're missing some things for PWA, Accessibility, Best Practices, and SEO that will improve your scores (and in the process make your site much more friendly to visitors and search engines). + +To improve your scores further, see the links below: - [Add a manifest file](/docs/add-a-manifest-file/) - [Add offline support](/docs/add-offline-support-with-a-service-worker/) diff --git a/docs/docs/building-an-e-commerce-site.md b/docs/docs/building-an-e-commerce-site.md index 5eeecc6381b00..bf4bc0d56b7b1 100644 --- a/docs/docs/building-an-e-commerce-site.md +++ b/docs/docs/building-an-e-commerce-site.md @@ -4,7 +4,7 @@ title: Building an E-commerce Site The speed and performance of sites built with Gatsby make it a great tool for building e-commerce sites. There are existing plugins for connecting services like [Shopify](/packages/gatsby-source-shopify/) and [Snipcart](/packages/gatsby-plugin-snipcart/) to Gatsby, and this section contains reference guides to help get things setup. -To see examples of e-commerce sites built with Gatsby, check out the [showcase](/showcase/?filters%5B0%5D=eCommerce). +To see examples of e-commerce sites built with Gatsby, check out the [showcase](/showcase/?filters%5B0%5D=E-commerce). diff --git a/docs/docs/creating-a-generic-plugin.md b/docs/docs/creating-a-generic-plugin.md index 9ddcca4d41227..265ded7e2626f 100644 --- a/docs/docs/creating-a-generic-plugin.md +++ b/docs/docs/creating-a-generic-plugin.md @@ -4,7 +4,7 @@ title: Creating a Generic Plugin This section aims to explain the structure of a Gatsby plugin and the files you need to create one. -The idea of a generic plugin is to lay more emphasis on the makeup of a plugin rather than the specific labels ([source](/docs/creating-a-source-plugin/), [transformer](/docs/creating-a-transformer-plugin/), [local](https://www.gatsbyjs.org/docs/creating-a-local-plugin/)) that are [selected based on functionality](/docs/naming-a-plugin/). As seen in the [what is a plugin doc](/docs/what-is-a-plugin/), a plugin is a piece of software that acts as an add-on and gives a Gatsby site additional functionality. 
+The idea of a generic plugin is to lay more emphasis on the makeup of a plugin rather than the specific labels ([source](/docs/creating-a-source-plugin/), [transformer](/docs/creating-a-transformer-plugin/), [local](/docs/creating-a-local-plugin/)) that are [selected based on functionality](/docs/naming-a-plugin/). As seen in the [what is a plugin doc](/docs/what-is-a-plugin/), a plugin is a piece of software that acts as an add-on and gives a Gatsby site additional functionality. Plugins contain a file, usually in the project root, called `package.json` - this file holds various metadata relevant to the project. The `package.json` file is also used to provide information to npm that identifies the project and allows npm to handle the project's dependencies. @@ -13,7 +13,7 @@ Plugins contain a file, usually in the project root, called `package.json` - thi To initialize a `package.json` for your project, run the following command: ```shell - npm init +npm init ``` Once you've run the command you'll see a series of options listed in the command-line interface (CLI). Those you select are stored in your `package.json` which contains some of the [files Gatsby looks for in a Plugin](/docs/files-gatsby-looks-for-in-a-plugin) @@ -27,7 +27,7 @@ In `gatsby-node.js` you can carry out functions with these APIs, such as: - Loading API keys - Sending calls to APIs - Creating Gatsby-nodes using the API response -- Creating Individual pages from nodes +- Creating individual pages from nodes > A good use case of the above would be a plugin that gets data from an API. @@ -36,16 +36,16 @@ In `gatsby-node.js` you can carry out functions with these APIs, such as: [sourceNodes](/docs/node-apis/#sourceNodes) is a life-cycle API that a plugin can use to create Nodes. An example of how to implement a function using `sourceNodes` is shown below: ```javascript:title=gatsby-node.js -exports.sourceNodes = ({ actions, createNodeId, createContentDigest })=>{ +exports.sourceNodes = ({ actions, createNodeId, createContentDigest }) => { const nodeData = { - title : "Test Node", - description:"Testing the node " + title: "Test Node", + description: "Testing the node ", } const newNode = { ...nodeData, - id: createNodeId("TestNode-testid") - internal :{ - type: "TestNode" + id: createNodeId("TestNode-testid"), + internal: { + type: "TestNode", contentDigest: createContentDigest(nodeData), }, } diff --git a/docs/docs/creating-a-source-plugin.md b/docs/docs/creating-a-source-plugin.md index 0af8ac02d0e47..fa905bcb9711b 100644 --- a/docs/docs/creating-a-source-plugin.md +++ b/docs/docs/creating-a-source-plugin.md @@ -4,11 +4,11 @@ title: Creating a Source Plugin Source plugins are essentially out of the box integrations between Gatsby and various third-party systems. -These systems can be CMSs like Contentful or WordPress, other cloud services like Lever and Strava, or your local filesystem -- literally anything that has an API. Currently, Gatsby has [over 300 source plugins](/plugins/?=gatsby-source). +These systems can be CMSs like Contentful or WordPress, other cloud services like Lever and Strava, or your local filesystem -- literally anything that has an API. Currently, Gatsby has [over 400 source plugins](/plugins/?=gatsby-source). Once a source plugin brings data into Gatsby's system, it can be transformed further with **transformer plugins**. For step-by-step examples of how to create source and transformer plugins, check out the Gatsby [tutorials section](/tutorial/plugin-and-theme-tutorials/). 
-## What do source plugins do? +## Overview of a source plugin At a high-level, a source plugin: @@ -18,26 +18,59 @@ At a high-level, a source plugin: - Links nodes & creates relationships between them. - Lets Gatsby know when nodes are finished sourcing so it can move on to processing them. -## What does the code look like? +A source plugin is a regular npm package. It has a `package.json` file, with optional dependencies, as well as a [`gatsby-node.js`](/docs/api-files-gatsby-node) file where you implement Gatsby's [Node APIs](/docs/node-apis/). Read more about [files Gatsby looks for in a plugin](/docs/files-gatsby-looks-for-in-a-plugin/) or [creating a generic plugin](/docs/creating-a-generic-plugin). -A source plugin is a regular NPM package. It has a `package.json` file with optional -dependencies as well as a [`gatsby-node.js`](/docs/api-files-gatsby-node) file where you implement Gatsby's [Node -APIs](/docs/node-apis/). Read more about [Files Gatsby Looks for in a Plugin](/docs/files-gatsby-looks-for-in-a-plugin/). +## Implementing features for source plugins -Gatsby's minimum supported Node.js version is Node 8 and as it's common to want to use more modern Node.js and JavaScript syntax, many plugins write code in a -source directory and compile the code. All plugins maintained in the Gatsby repo -follow this pattern. +Key features that are often built into source plugins are covered in this guide to help explain Gatsby specific helpers and APIs, independent of the source the data is coming from. -Your `gatsby-node.js` should look something like: +> You can see examples of all the features implemented in this guide (sourcing data, caching, live data synchronization, and remote image optimization) **in the working example repository** for [creating source plugins](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) which contains a local server you can run to test with an example source plugin. -```javascript:title=gatsby-node.js +### Sourcing data and creating nodes + +All source plugins must fetch data and create nodes from that data. By fetching data and creating nodes at [build time](/docs/glossary#build), Gatsby can make the data available as static assets instead of having to fetch it at [runtime](/docs/glossary#runtime). This happens in the [`sourceNodes` lifecycle](/docs/node-apis/#sourceNodes) with the [`createNode` action](/docs/actions/#createNode). + +This example—taken from [the `sourceNodes` API docs](/docs/node-apis/#sourceNodes)—shows how to create a single node from hardcoded data: + +```javascript:title=source-plugin/gatsby-node.js +exports.sourceNodes = ({ actions, createNodeId, createContentDigest }) => { + const { createNode } = actions + + // Data can come from anywhere, but for now create it manually + const myData = { + key: 123, + foo: `The foo field of my node`, + bar: `Baz`, + } + + const nodeContent = JSON.stringify(myData) + + const nodeMeta = { + id: createNodeId(`my-data-${myData.key}`), + parent: null, + children: [], + internal: { + type: `MyNodeType`, + mediaType: `text/html`, + content: nodeContent, + contentDigest: createContentDigest(myData), + }, + } + + const node = Object.assign({}, myData, nodeMeta) + createNode(node) +} +``` + +Source plugins follow the same pattern, the only difference is that data comes from other sources. Plugins can leverage Node.js built-in functions like `http.get`, libraries like `node-fetch` or `axios`, or even fully-featured GraphQL clients to fetch data. 
With data being returned from a remote location, the plugin code can loop through and create nodes programmatically: + +```javascript:title=source-plugin/gatsby-node.js exports.sourceNodes = async ({ actions }) => { const { createNode } = actions - // Create nodes here, generally by downloading data - // from a remote API. + // Download data from a remote API. const data = await fetch(REMOTE_API) - // Process data into nodes. + // Process data and create nodes.using a custom processDatum function data.forEach(datum => createNode(processDatum(datum))) // You're done, return. @@ -45,120 +78,409 @@ exports.sourceNodes = async ({ actions }) => { } ``` -Peruse the [`sourceNodes`](/docs/node-apis/#sourceNodes) and -[`createNode`](/docs/actions/#createNode) docs for detailed -documentation on implementing those APIs. +The [`createNode`](/docs/actions/#createNode) function is a Gatsby specific action. `createNode` is used to create the nodes that Gatsby tracks and makes available for querying with GraphQL. -### Transforming data received from remote sources +_Note: **Be aware of asynchronous operations!** Because fetching data is an asynchronous task, you need to make sure you `await` data coming from remote sources, return a Promise, or return the callback (the 3rd parameter available in lifecycle APIs) from `sourceNodes`. If you don't, Gatsby will continue on in the build process, before nodes are finished being created. This can result in your nodes not ending up in the generated schema at compilation time, or the process could hang while waiting for an indication that it's finished. You can read more in the [Debugging Asynchronous Lifecycle APIs guide](/docs/debugging-async-lifecycles/)._ -Each node created by the filesystem source plugin includes the -raw content of the file and its _media type_. +### Caching data between runs -[A **media type**](https://en.wikipedia.org/wiki/Media_type) (also **MIME type** -and **content type**) is an official way to identify the format of -files/content that is transmitted on the internet, e.g. over HTTP or through -email. You might be familiar with other media types such as -`application/javascript`, `application/pdf`, `audio/mpeg`, `text/html`, -`text/plain`, `image/jpeg`, etc. +Some operations like fetching data from an endpoint can be performance heavy or time-intensive. In order to improve the experience of developing with your source plugin, you can leverage the Gatsby cache to store data between runs of `gatsby develop` or `gatsby build`. -Each source plugin is responsible for setting the media type for the nodes they -create. This way, source and transformer plugins can work together easily. +You access the `cache` in Gatsby Node APIs and use the `set` and `get` functions to store and retrieve data as JSON objects. -This is not a required field -- if it's not provided, Gatsby will [infer](/docs/glossary#inference) the type from data that is sent -- but it's the way for source plugins to indicate to -transformers that there is "raw" data that can still be further processed. It -also allows plugins to remain small and focused. Source plugins don't have to have -opinions on how to transform their data: they can set the `mediaType` and -push that responsibility to transformer plugins, instead. 
+```javascript:title=source-plugin/gatsby-node.js +exports.onPostBuild = async ({ cache }) => { + await cache.set(`key`, `value`) + const cachedValue = await cache.get(`key`) + console.log(cachedValue) // logs `value` +} +``` -For example, it's common for services to allow you to add content in -Markdown format. If you pull that Markdown into Gatsby and create a new node, what -then? How would a user of your source plugin convert that Markdown into HTML -they can use in their site? You would create a -node for the Markdown content and set its `mediaType` as `text/markdown` and the -various Gatsby Markdown transformer plugins would see your node and transform it -into HTML. +The above snippet shows a contrived example for the `cache`, but it can be used in more sophisticated cases to reduce the time it takes to run your plugin. For example, by caching a timestamp, you can use it to fetch solely the data that has been updated since the last time data was fetched from the source: -This loose coupling between the data source and the transformer plugins allow Gatsby site builders to assemble complex data transformation pipelines with -little work on their (and your (the source plugin author)) part. +```javascript:title=source-plugin/gatsby-node.js +exports.sourceNodes = async ({ cache }) => { + // get the last timestamp from the cache + const lastFetched = await cache.get(`timestamp`) -## Getting helper functions + // pull data from some remote source using cached data as an option in the request + const data = await fetch( + `https://remotedatasource.com/posts?lastUpdated=${lastFetched}` + ) + // ... +} -[`gatsby-node-helpers`](https://github.com/angeloashmore/gatsby-node-helpers), -a community-made NPM package, can help when writing source plugins. This -package provides a set of helper functions to generate Node objects with the -required fields. This includes automatically generating fields like node IDs -and the `contentDigest` MD5 hash, keeping your code focused on data gathering, -not boilerplate. +exports.onPostBuild = async ({ cache }) => { + // set a timestamp at the end of the build + await cache.set(`timestamp`, Date.now()) +} +``` -## Gotcha: don't forget to return! +> In addition to the cache, plugins can save metadata to the [internal Redux store](/docs/data-storage-redux/) with `setPluginStatus`. -After your plugin is finished sourcing nodes, it should either return a Promise or use the callback (3rd parameter) to report back to Gatsby when `sourceNodes` is fully executed. If a Promise or callback isn't returned, Gatsby will continue on in the build process, before nodes are finished being created. Without the necessary return statement your nodes might not end up in the generated schema at compilation time, or the process will hang while waiting for an indication that it's finished. +This can reduce the time it takes repeated data fetching operations to run if you are pulling in large amounts of data for your plugin. Existing plugins like [`gatsby-source-contentful`](https://github.com/gatsbyjs/gatsby/blob/master/packages/gatsby-source-contentful/src/gatsby-node.js) generate a token that is sent with each request to only return new data. -## Advanced +You can read more about the cache API, other types of plugins that leverage the cache, and example open source plugins that use the cache in the [build caching guide](/docs/build-caching). 
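+
+The `setPluginStatus` action mentioned above works in a similar way. As a rough sketch only: the plugin name `gatsby-source-example` and the shape of the stored object are placeholders, and reading the status back from the Redux store is an assumption you should verify against the `setPluginStatus` docs:
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = async ({ actions, store }) => {
+  const { setPluginStatus } = actions
+
+  // read whatever this plugin stored on a previous run
+  // (assumed to be keyed by the plugin's name in the store)
+  const status = store.getState().status.plugins[`gatsby-source-example`] || {}
+  const lastFetched = status.lastFetched
+
+  // ...fetch only records changed since lastFetched and create nodes...
+
+  // persist a value for the next run
+  setPluginStatus({ lastFetched: Date.now() })
+}
+```
+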
### Adding relationships between nodes Gatsby source plugins not only create nodes, they also create relationships between nodes that are exposed to GraphQL queries. -There are two ways of adding node relationships in Gatsby: (1) transformations (parent-child) or (2) foreign-key based. - -#### Option 1: transformation relationships +There are two types of node relationships in Gatsby: (1) foreign-key based and (2) transformations (parent-child). -An example of a transformation relationship is the `gatsby-transformer-remark` plugin, which transforms a parent `File` node's markdown string into a `MarkdownRemark` node. The Remark transformer plugin adds its newly created child node as a child of the parent node using the action [`createParentChildLink`](/docs/actions/#createParentChildLink). Transformation relationships are used when a new node is _completely_ derived from a single parent node. E.g. the markdown node is derived from the parent `File` node and wouldn't ever exist if the parent `File` node hadn't been created. +#### Option 1: foreign-key relationships -Because all children nodes are derived from their parent, when a parent node is deleted or changed, Gatsby deletes all of the child nodes (and their child nodes, and so on) with the expectation that they'll be recreated again by transformer plugins. This is done to ensure there are no nodes left over that were derived from older versions of data but shouldn't exist any longer. +An example of a foreign-key relationship would be a `Post` type (like a blog post) that has an `Author`. -_Creating the transformation relationship_ +In this relationship, each object is a distinct entity that exists whether or not the other does. They could each be queried individually. -In order to create a parent/child relationship, when calling `createNode` for the child node, the new node object that is passed in should have a `parent` key with the value set to the parent node's `id`. After this, call the `createParentChildLink` function exported inside `actions`. - -_Examples_ +```graphql +post { + id + title +} +author { + id + name +} +``` -[Here's the above example](https://github.com/gatsbyjs/gatsby/blob/72077527b4acd3f2109ed5a2fcb780cddefee35a/packages/gatsby-transformer-remark/src/on-node-create.js#L39-L67) from the `gatsby-transformer-remark` source plugin. +Each type has independent schemas and field(s) on that reference the other entity -- in this case the `Post` would have an `Author`, and the `Author` might have `Post`s. The API of a service that allows complex object modelling, for example a CMS, will often allow users to add relationships between entities and expose them through the API. This same relationship can be represented by your schema. + +```graphql +post { + id + title + // highlight-start + author { + id + name + } + // highlight-end +} +author { + id + name + // highlight-start + posts { + id + title + } + // highlight-end +} +``` -[Here's another example](https://github.com/gatsbyjs/gatsby/blob/1fb19f9ad16618acdac7eda33d295d8ceba7f393/packages/gatsby-transformer-sharp/src/on-node-create.js#L3-L25) from the `gatsby-transformer-sharp` source plugin. +When an object node is deleted, Gatsby _does not_ delete any referenced entities. When using foreign-key references, it's a source plugin's responsibility to clean up any dangling entity references. -#### Option 2: foreign-key relationships +##### Creating the relationship -An example of a foreign-key relationship would be a Post that has an Author. 
+Suppose you want to create a relationship between `Post`s and `Author`s in order to query the `author` field on a post:
+
+```graphql
+query {
+  post {
+    id
+    // highlight-start
+    author {
+      id
+      name
+    }
+    // highlight-end
+  }
+}
+```
 
-In this relationship, each object is a distinct entity that exists whether or not the other does, with independent schemas, and field(s) on each entity that reference the other entity -- in this case the Post would have an Author, and the Author might have Posts. The API of a service that allows complex object modelling, for example a CMS, will often allow users to add relationships between entities and expose them through the API.
 
+For Gatsby to automatically infer a relationship, you need to create a field called `author___NODE` on the Post object to hold the relationship to Authors before you create the node. The value of this field should be the node ID of the Author.
 
-When an object node is deleted, Gatsby _does not_ delete any referenced entities. When using foreign-key references, it's a source plugin's responsibility to clean up any dangling entity references.
 
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = ({ actions, createContentDigest }) => {
+  const { createNode } = actions
+  createNode({
+    // Data for the Post node
+    author___NODE: ``, // highlight-line
+    // Required fields
+    id: `a-node-id`,
+    parent: null,
+    children: [],
+    internal: {
+      type: `post`,
+      contentDigest: createContentDigest(fieldData),
+    },
+  })
+}
+```
 
-##### Creating the relationship
 
+For a stricter GraphQL schema, you can specify the exact field and value to link nodes using schema customization APIs.
 
-Suppose you want to create a relationship between Posts and Authors, and you want to call the field `author`.
 
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = ({ actions, createContentDigest }) => {
+  const { createNode } = actions
+  createNode({
+    // Data for the Post node
+    // highlight-start
+    author: {
+      name: `Jay Gatsby`,
+    },
+    // highlight-end
+    // Required fields
+    id: `a-node-id`,
+    parent: null,
+    children: [],
+    internal: {
+      type: `post`,
+      contentDigest: createContentDigest(fieldData),
+    },
+  })
+}
+
+exports.createSchemaCustomization = ({ actions }) => {
+  const { createTypes } = actions
+  createTypes(`
+    type Post implements Node {
+      id: ID!
+      # create a relationship between the Post and Author nodes
+      author: Author @link(from: "author.name", by: "name") // highlight-line
+      # ... other fields
+    }`)
+}
+```
 
-Before you pass the Post object and Author object into `createNode` and create the respective nodes, you need to create a field called `author___NODE` on the Post object to hold the relationship to Authors. The value of this field should be the node ID of the Author.
 
 ##### Creating the reverse relationship
 
-It's often convenient for querying to add to the schema backwards references. For example, you might want to query the Author of a Post but you might also want to query all the posts an author has written.
+It's often convenient for querying to add backwards references to the schema. For example, you might want to query the author of a post, but you might also want to query all the posts an author has written.
 
-If you want to call this field on `Author` `posts`, you would create a field called `posts___NODE` to hold the relationship to Posts. The value of this field should be an array of Post IDs.
+If you want to add a field called `posts` on the `Author` nodes using the inference method, you would create a field called `posts___NODE` on each Author to hold the relationship to posts. The value of this field should be an array of `Post` IDs.
 
 Here's an example from the [WordPress source plugin](https://github.com/gatsbyjs/gatsby/blob/1fb19f9ad16618acdac7eda33d295d8ceba7f393/packages/gatsby-source-wordpress/src/normalize.js#L178-L189).
 
+With schema customization, you would add the `@link` directive to your Author type. The `@link` directive will look for an ID on the `post` field of the Author nodes, which can be added when the Author nodes are created.
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.createSchemaCustomization = ({ actions }) => {
+  const { createTypes } = actions
+  createTypes(`
+    type Post implements Node {
+      id: ID!
+      # create a relationship between the Post and Author nodes
+      author: Author @link(from: "author.name", by: "name") // highlight-line
+      # ... other fields
+    }
+
+    type Author implements Node {
+      name: String!
+      post: Post @link // highlight-line
+    }`)
+}
+```
+
+You can read more about connecting foreign key fields with schema customization in the guide on [customizing the GraphQL schema](/docs/schema-customization/#foreign-key-fields).
+
+#### Option 2: transformation relationships
+
+When a node is _completely_ derived from another node, you'll want to use a transformation relationship. An example that is common in source plugins is transforming File nodes from remote sources, e.g. images. You can read about this use case in the section below on [sourcing images from remote locations](/docs/creating-a-source-plugin/#sourcing-images-from-remote-locations).
+
+You can find more information about transformation relationships in the [creating a transformer plugin guide](/docs/creating-a-transformer-plugin/#creating-the-transformer-relationship).
+
 #### Union types
 
-When creating fields linking to an array of nodes, if the array of IDs are all of the same type, the relationship field that is created will be of this type. If the linked nodes are of different types; the field will turn into a union type of all types that are linked. See the [GraphQL documentation on how to query union types](https://graphql.org/learn/schema/#union-types).
+For either type of relationship you can link a field to an array of nodes. If the array of IDs all correspond to nodes of the same type, the relationship field that is created will be of this type. If the linked nodes are of different types, the field will turn into a union type of all types that are linked. See the [GraphQL documentation on how to query union types](https://graphql.org/learn/schema/#union-types).
+
+### Working with data received from remote sources
+
+#### Setting media and MIME types
+
+Each node created by the filesystem source plugin includes the raw content of the file and its _media type_.
+
+[A **media type**](https://en.wikipedia.org/wiki/Media_type) (also **MIME type** and **content type**) is an official way to identify the format of files/content that are transmitted via the internet, e.g. over HTTP or through email. You might be familiar with other media types such as `application/javascript`, `audio/mpeg`, `text/html`, etc.
+
+Each source plugin is responsible for setting the media type for the nodes it creates. This way, source and transformer plugins can work together easily.
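+
+As a short sketch, a source plugin that pulls Markdown from a remote API might set the media type like this (the `apiData` record and its fields are hypothetical):
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = ({ actions, createNodeId, createContentDigest }) => {
+  const { createNode } = actions
+
+  // a hypothetical record fetched from a remote service
+  const apiData = { slug: `hello-world`, markdown: `# Hello World` }
+
+  createNode({
+    id: createNodeId(`example-markdown-${apiData.slug}`),
+    parent: null,
+    children: [],
+    internal: {
+      type: `ExampleMarkdown`,
+      // tell transformer plugins this node carries raw Markdown
+      mediaType: `text/markdown`,
+      content: apiData.markdown,
+      contentDigest: createContentDigest(apiData),
+    },
+  })
+}
+```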
+ +This is not a required field -- if it's not provided, Gatsby will [infer](/docs/glossary#inference) the type from data that is sent -- but it's how source plugins indicate to transformers that there is "raw" data the transformer can further process. + +It also allows plugins to remain small and focused. Source plugins don't have to have opinions on how to transform their data: they can set the `mediaType` and push that responsibility to transformer plugins instead. + +For example, it's common for services to allow you to add content in Markdown format. If you pull that Markdown into Gatsby and create a new node, what then? How would a user of your source plugin convert that Markdown into HTML they can use in their site? You would create a node for the Markdown content and set its `mediaType` as `text/markdown` and the various Gatsby Markdown transformer plugins would see your node and transform it into HTML. -#### Further specification +This loose coupling between the data source and the transformer plugins allow Gatsby site builders to assemble complex data transformation pipelines with little work on their (and your (the source plugin author)) part. -See -[_Node Link_](/docs/api-specification/) in the API Specification concepts -section for more info. +#### Sourcing and optimizing images from remote locations + +A common use case for source plugins is pulling images from a remote location and optimizing them for use with [Gatsby Image](/packages/gatsby-image/). An API may return a URL for an image on a CDN, which could be further optimized by Gatsby at build time. + +This can be achieved by the following steps: + +1. Install `gatsby-source-filesystem` as a dependency in your source plugin: + +``` +npm install gatsby-source-filesystem +``` + +2. Create File nodes using the `createRemoteFileNode` function exported by `gatsby-source-filesystem`: + +```javascript:title=source-plugin/gatsby-node.js +const { createRemoteFileNode } = require(`gatsby-source-filesystem`) + +exports.onCreateNode = async ({ + actions: { createNode }, + getCache, + createNodeId, + node, +}) => { + // because onCreateNode is called for all nodes, verify that you are only running this code on nodes created by your plugin + if (node.internal.type === `your-source-node-type`) { + // create a FileNode in Gatsby that gatsby-transformer-sharp will create optimized images for + const fileNode = await createRemoteFileNode({ + // the url of the remote image to generate a node for + url: node.imgUrl, + getCache, + createNode, + createNodeId, + parentNodeId: node.id, + }) + } +} +``` + +3. Add the ID of the new File node to your source plugin's node. 
+ +```javascript:title=source-plugin/gatsby-node.js +const { createRemoteFileNode } = require(`gatsby-source-filesystem`) + +exports.onCreateNode = async ({ + actions: { createNode }, + getCache, + createNodeId, + node, +}) => { + // because onCreateNode is called for all nodes, verify that you are only running this code on nodes created by your plugin + if (node.internal.type === `your-source-node-type`) { + // create a FileNode in Gatsby that gatsby-transformer-sharp will create optimized images for + const fileNode = await createRemoteFileNode({ + // the url of the remote image to generate a node for + url: node.imgUrl, + getCache, + createNode, + createNodeId, + parentNodeId: node.id, + }) + + // highlight-start + if (fileNode) { + // with schemaCustomization: add a field `remoteImage` to your source plugin's node from the File node + node.remoteImage = fileNode.id + + // OR with inference: link your source plugin's node to the File node without schemaCustomization like this, but creates a less sturdy schema + node.remoteImage___NODE = fileNode.id + } + // highlight-end + } +} +``` + +Attaching `fileNode.id` to `remoteImage___NODE` will rely on Gatsby's [inference](/docs/glossary/#inference) of the GraphQL schema to create a new field `remoteImage` as a relationship between the nodes. This is done automatically. For a sturdier schema, you can relate them using [`schemaCustomization` APIs](/docs/node-apis/#createSchemaCustomization) by adding the `fileNode.id` to a field that you reference when you `createTypes`: + +```javascript:title=source-plugin/gatsby-node.js +exports.createSchemaCustomization = ({ actions }) => { + const { createTypes } = actions + createTypes(` + type YourSourceType implements Node { + id: ID! + # create a relationship between YourSourceType and the File nodes for optimized images + remoteImage: File @link // highlight-line + }`) +} +``` + +4. Verify that `gatsby-plugin-sharp` and `gatsby-transformer-sharp` are included in the site that is using the plugin: + +```javascript:title=gatsby-config.js +module.exports = { + plugins: [ + // loads the source-plugin + `your-source-plugin`, + // required to generate optimized images + `gatsby-plugin-sharp`, + `gatsby-transformer-sharp`, + ], +} +``` + +Then, the sharp plugins will automatically transform the File nodes created by `createRemoteFileNode` in `your-source-plugin` (which have supported image extensions like .jpg or .png). You can then query for the `remoteImage` field on your source type: + +```graphql +query { + yourSourceType { + id + remoteImage { + childImageSharp { + # fluid or fixed fields for optimzed images + } + } + } +} +``` ### Improve plugin developer experience by enabling faster sync -One tip to improve the development experience of using a plugin is to reduce the time it takes to sync between Gatsby and the data source. There are two approaches for doing this: +One challenge when developing locally is that a developer might make modifications in a remote data source, like a CMS, and then want to see how it looks in the local environment. Typically they will have to restart the `gatsby develop` server to see changes. In order to improve the development experience of using a plugin, you can reduce the time it takes to sync between Gatsby and the data source by enabling faster synchronization of data changes. There are two approaches for doing this: + +- **Proactively fetch updates**. You can avoid having to restart the `gatsby develop` server by proactively fetching updates from the remote server. 
For example, [gatsby-source-sanity](https://github.com/sanity-io/gatsby-source-sanity) listens to changes to Sanity content when `watchMode` is enabled and pulls them into the Gatsby develop server. The [example source plugin repository](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) uses GraphQL subscriptions to listen for changes and update data.
+- **Add event-based sync**. Some data sources keep event logs and are able to return a list of objects modified since a given time. If you're building a source plugin, you can store the last time you fetched data using the [cache](/docs/creating-a-source-plugin/#caching-data-between-runs) or [`setPluginStatus`](/docs/actions/#setPluginStatus) and then only sync down nodes that have been modified since that time. [gatsby-source-contentful](https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-source-contentful) is an example of a source plugin that does this.
+
+If possible, the proactive listener approach creates the best experience, provided the data source's existing APIs can support it (or you have access to build that support into the data source).
+
+The code to support this behavior looks like this:
+
+```javascript:title=source-plugin/gatsby-node.js
+exports.sourceNodes = async ({ actions, getNode, getNodesByType, createNodeId }, pluginOptions) => {
+  const { createNode, touchNode, deleteNode } = actions
+
+  // highlight-start
+  // touch nodes to ensure they aren't garbage collected
+  getNodesByType(`YourSourceType`).forEach(node => touchNode({ nodeId: node.id }))
+
+  // ensure a plugin is in a preview mode and/or supports listening
+  if (pluginOptions.preview) {
+    const subscription = await client.subscribe(SUBSCRIPTION_TO_WEBSOCKET)
+    subscription.subscribe(({ data: newData }) => {
+      newData.forEach(newDatum => {
+        switch (newDatum.status) {
+          case "deleted":
+            deleteNode({
+              node: getNode(createNodeId(`YourSourceType-${newDatum.uuid}`)),
+            })
+            break
+          case "created":
+          case "updated":
+          default:
+            // created and updated can be handled by the same code path
+            // the post's id is presumed to stay constant (or can be inferred)
+            createNode(processDatum(newDatum))
+            break
+        }
+      })
+    })
+  }
+  // highlight-end
+
+  const data = await client.query(QUERY_TO_API)
+
+  // Process data and create nodes using a custom processDatum function
+  data.forEach(datum => createNode(processDatum(datum)))
+
+  // You're done, return.
+  return
+}
+```
+
+_Note: This is pseudo code to illustrate the logic and concept of how these plugins function; you can see a working example in the [creating source plugins](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) repository._
+
+Because the code in `sourceNodes` is reinvoked when changes in the data source occur, a few steps need to be taken to ensure that Gatsby is tracking the existing nodes as well as the new data. A first step is ensuring that the existing nodes created are not garbage collected, which is done by "touching" the nodes with the [`touchNode` action](/docs/actions/#touchNode).
 
-- **Add event-based sync**. Some data sources keep event logs and are able to return a list of objects modified since a given time. If you're building a source plugin, you can store
-  the last time you fetched data using
-  [`setPluginStatus`](/docs/actions/#setPluginStatus) and then only sync down nodes that have been modified since that time. [gatsby-source-contentful](https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-source-contentful) is an example of a source plugin that does this.
-- **Proactively fetch updates**. One challenge when developing locally is that a developer might make modifications in a remote data source, like a CMS, and then want to see how it looks in the local environment. Typically they will have to restart the `gatsby develop` server to see changes. This can be avoided if your source plugin knows to proactively fetch updates from the remote server. For example, [gatsby-source-sanity](https://github.com/sanity-io/gatsby-source-sanity), listens to changes to Sanity content when `watchMode` is enabled and pulls them into the Gatsby develop server. +Then the new data needs to be pulled in via a live update like a websocket (in the example above with a subscription). The new data needs to have some information attached that dictates whether the data was created, updated, or deleted; that way, when it is processed, a new node can be created/updated (with `createNode`) or deleted (with `deleteNode`). In the example above that information is coming from `newDatum.status`. ## Additional resources -- Tutorial: [Creating a Pixabay Image Source Plugin](/tutorial/pixabay-source-plugin-tutorial/) +- Working example repository on [creating source plugins](https://github.com/gatsbyjs/gatsby/tree/master/examples/creating-source-plugins) with the features in this guide implemented +- Tutorial on [Creating a Pixabay Image Source Plugin](/tutorial/pixabay-source-plugin-tutorial/) +- [`gatsby-node-helpers`](https://github.com/angeloashmore/gatsby-node-helpers), a community-made npm package with helper functions to generate Node objects with required fields like IDs and the `contentDigest` MD5 hash. diff --git a/docs/docs/creating-a-transformer-plugin.md b/docs/docs/creating-a-transformer-plugin.md index 5ec01d2a7945a..19358dc4989eb 100644 --- a/docs/docs/creating-a-transformer-plugin.md +++ b/docs/docs/creating-a-transformer-plugin.md @@ -102,6 +102,8 @@ Now you have a `File` node to work with: Now, transform the newly created `File` nodes by hooking into the `onCreateNode` API in `gatsby-node.js`. +#### Convert yaml into JSON for storage in Gatsby nodes + If you're following along in an example project, install the following packages: ```shell @@ -137,41 +139,67 @@ File content: Parsed YAML content: -```javascript -;[ +```json +[ { - id: "Jane Doe", - bio: "Developer based in Somewhere, USA", + "id": "Jane Doe", + "bio": "Developer based in Somewhere, USA" }, { - id: "John Smith", - bio: "Developer based in Maintown, USA", - }, + "id": "John Smith", + "bio": "Developer based in Maintown, USA" + } ] ``` Now you'll write a helper function to transform the parsed YAML content into new Gatsby nodes: -```javascript +```javascript:title=gatsby-node.js function transformObject(obj, id, type) { const yamlNode = { ...obj, id, children: [], - parent: node.id, + parent: null, internal: { contentDigest: createContentDigest(obj), type, }, } createNode(yamlNode) - createParentChildLink({ parent: node, child: yamlNode }) } ``` Above, you create a `yamlNode` object with the shape expected by the [`createNode` action](/docs/actions/#createNode). -You then create a link between the parent node (file) and the child node (yaml content). 
+#### Creating the transformer relationship + +You then need to create a link between the parent node (file) and the child node (yaml content) using the `createParentChildLink` function after adding the parent node's id to the `yamlNode`: + +```javascript:title=gatsby-node.js +function transformObject(obj, id, type) { + const yamlNode = { + ...obj, + id, + children: [], + parent: node.id, // highlight-line + internal: { + contentDigest: createContentDigest(obj), + type, + }, + } + createNode(yamlNode) + createParentChildLink({ parent: node, child: yamlNode }) // highlight-line +} +``` + +Another example of a transformation relationship is the `gatsby-source-filesystem` plugin used with the `gatsby-transformer-remark` plugin. This combination transforms a parent `File` node's markdown string into a `MarkdownRemark` node. The remark transformer plugin adds its newly created child node as a child of the parent node using the action [`createParentChildLink`](/docs/actions/#createParentChildLink). Transformation relationships like this are used when a new node is _completely_ derived from a single parent node. E.g. the markdown node is derived from the parent `File` node and would not exist if the parent `File` node hadn't been created. + +Because all children nodes are derived from their parent, when a parent node is deleted or changed, Gatsby deletes all of the child nodes (and their child nodes, and so on). Gatsby does so with the expectation that they'll be recreated again by transformer plugins. This is done to ensure there are no nodes left over that were derived from older versions of data but should no longer exist. + +_For examples of other plugins creating transformation relationships, you can see the [`gatsby-transformer-remark` plugin](https://github.com/gatsbyjs/gatsby/blob/72077527b4acd3f2109ed5a2fcb780cddefee35a/packages/gatsby-transformer-remark/src/on-node-create.js#L39-L67) (from the above example) or the [`gatsby-transformer-sharp` plugin](https://github.com/gatsbyjs/gatsby/blob/1fb19f9ad16618acdac7eda33d295d8ceba7f393/packages/gatsby-transformer-sharp/src/on-node-create.js#L3-L25)._ + +#### Create new nodes from the derived data In your updated `gatsby-node.js`, you'll then iterate through the parsed YAML content, using the helper function to transform each into a new node: @@ -227,6 +255,8 @@ async function onCreateNode({ exports.onCreateNode = onCreateNode ``` +#### Query for the transformed data + Now you can query for your new nodes containing our transformed YAML data: ```graphql diff --git a/docs/docs/deploying-to-firebase.md b/docs/docs/deploying-to-firebase.md index 2fb8a440705ab..d0d159489f34a 100644 --- a/docs/docs/deploying-to-firebase.md +++ b/docs/docs/deploying-to-firebase.md @@ -47,61 +47,61 @@ In this guide, you will learn how to deploy your Gatsby site to Firebase Hosting 1. 
Update the `firebase.json` with the following cache settings -```json -{ - "hosting": { - "public": "public", - "ignore": ["firebase.json", "**/.*", "**/node_modules/**"], - "headers": [ - { - "source": "**/*", - "headers": [ - { - "key": "cache-control", - "value": "cache-control: public, max-age=0, must-revalidate" - } - ] - }, - { - "source": "static/**", - "headers": [ - { - "key": "cache-control", - "value": "public, max-age=31536000, immutable" - } - ] - }, - { - "source": "**/*.@(css|js)", - "headers": [ - { - "key": "cache-control", - "value": "public, max-age=31536000, immutable" - } - ] - }, - { - "source": "sw.js", - "headers": [ - { - "key": "cache-control", - "value": "cache-control: public, max-age=0, must-revalidate" - } - ] - }, - { - "source": "page-data/**", - "headers": [ - { - "key": "cache-control", - "value": "cache-control: public, max-age=0, must-revalidate" - } - ] - } - ] - } -} -``` + ```json + { + "hosting": { + "public": "public", + "ignore": ["firebase.json", "**/.*", "**/node_modules/**"], + "headers": [ + { + "source": "**/*", + "headers": [ + { + "key": "cache-control", + "value": "cache-control: public, max-age=0, must-revalidate" + } + ] + }, + { + "source": "static/**", + "headers": [ + { + "key": "cache-control", + "value": "public, max-age=31536000, immutable" + } + ] + }, + { + "source": "**/*.@(css|js)", + "headers": [ + { + "key": "cache-control", + "value": "public, max-age=31536000, immutable" + } + ] + }, + { + "source": "sw.js", + "headers": [ + { + "key": "cache-control", + "value": "cache-control: public, max-age=0, must-revalidate" + } + ] + }, + { + "source": "page-data/**", + "headers": [ + { + "key": "cache-control", + "value": "cache-control: public, max-age=0, must-revalidate" + } + ] + } + ] + } + } + ``` 1. Prepare your site for deployment by running `gatsby build`. This generates a publishable version of your site in the `public` folder. diff --git a/docs/docs/deploying-to-gatsby-cloud.md b/docs/docs/deploying-to-gatsby-cloud.md new file mode 100644 index 0000000000000..cf72236660d4e --- /dev/null +++ b/docs/docs/deploying-to-gatsby-cloud.md @@ -0,0 +1,125 @@ +--- +title: Deploying to Gatsby Cloud +--- + +This guide will walk you through building and deploying your Gatsby site on [Gatsby Cloud](https://www.gatsbyjs.com/cloud). + +## Why Use Gatsby Cloud + +Gatsby Cloud is a platform of stable, trusted tools launched by the team behind Gatsby.js in late 2019 that enables web creators to build better websites. It offers unique features that remove friction in your team's workflow including: + +- [**Autoprovisioning**](https://www.gatsbyjs.com/docs/autoprovisioning) that empowers new users to create projects in minutes with a Content Management System (CMS), sample content, and connected Gatsby starter. +- [**Real-time Preview**](https://www.gatsbyjs.com/docs/viewing-preview/) to simplify content creation and collaboration. Preview offers a private playground for developers, designers, marketers, and content creators by providing a shareable temporary URL for viewing changes immediately and in context. With instant updates triggered by a CMS or webhooks, it’s a shareable, hot-reloading preview. +- **Numerous CMS Integrations**, many of which are automatic +- **Builds** is the fastest continuous deployment solution for Gatsby sites and apps- up to 20x faster build times compared to other solutions. Build with Gatsby and deploy to your favorite CDN. 
+- **Reports** provide automated Lighthouse performance checks and deploy previews to fix errors before they’re published. + +Best of all, Gatsby Cloud includes a [free tier](https://www.gatsbyjs.com/pricing/) designed to comfortably support personal and small sites. + +## Integrations + +Gatsby Cloud integrates with the tools you already use to build sites. By connecting your Gatsby project's Github repo, Gatsby Cloud automatically builds and deploys your site when you make changes. + +### CMS Integrations + +Gatsby Cloud offers integrations with a wide variety of CMSs. The below CMSs have first-class, automatic integrations with Gatsby Cloud: + +- Contentful +- Cosmic JS +- Dato CMS +- Sanity.io + +If you want to work with a CMS without automatic integration support you still can. There are specific documents available for the below integration points: + +- Contentstack +- Drupal +- Strapi + +In addition, Gatsby Cloud offers a POST endpoint for manually integrating with CMSs that support webhooks. + +### Hosting Integrations + +Gatsby Cloud offers automatic integration with the following hosting providers: + +- Netlify +- Amazon S3 +- Firebase +- Google Cloud Storage +- Fastly + +Please refer to the [Gatsby Cloud Docs](https://www.gatsbyjs.com/docs/) for full details on available integrations. + +## Set up a new Gatsby site from scratch + +1. Head over to [Gatsby Cloud](https://www.gatsbyjs.com/get-started/) and sign-up/sign-in with your GitHub account if you haven't already. +2. Click the **Create new site** button on your [Dashboard](https://www.gatsbyjs.com/dashboard/sites). +3. Choose the **I don't have a Gatsby site yet** option. +4. On _Tab 1_ choose from the Starter options and click **Next**. +5. On _Tab 2_, enter a name for your new project and click **Next**. This will be the name of the project repo added to your GitHub account. + + > Note, you will need to set your [GitHub permissions](https://github.com/settings/installations) to enable "All Repositories" access in order to allow Gatsby Cloud to create a new repo. + +6. On _Tab 3_, click **Connect** to authenticate with your chosen CMS provider. + + > If you're looking for instructions on configuring Gatsby Cloud with a specific CMS, check out the [Gatsby Cloud Docs](https://www.gatsbyjs.com/docs/). + +7. Once you successfully configure your CMS, click **Start my site**, prompting Gatsby Cloud to provision your starter project. +8. On _Tab 4_ click **Finish**. On your site's dashboard page, under the _Production_ tab, you'll see an in-progress build. + + > Once the build has finished, a URL will appear for you to view the live build of your site. + +9. Under the _Preview_ tab you can find your site's preview URL. + + > This preview URL will allow your team to make changes to your CMS and automatically view updates to your site in real-time without having to rebuild. + +10. If a build fails, you can click **View Details** to view the warning, errors, and raw logs for the build. + +## Set up an existing Gatsby site + +1. Head over to [Gatsby Cloud](https://www.gatsbyjs.com/get-started/) and sign-up/sign-in with your GitHub account if you haven't already. +2. Click the **Create new site** button on your [Dashboard](https://www.gatsbyjs.com/dashboard/sites). +3. Choose the **I already have a Gatsby site** option. +4. On _Tab 1_, select your repo containing your Gatsby site from the list of options. 
+ + > If you don't see your repo in the list, you can adjust your repository access by clicking the **Connect a new GitHub Organization** link or configuring the Gatsby Cloud app installation in your [GitHub settings](https://github.com/settings/installations). + +5. With your repo selected, you can modify the _Production Branch_ and _Base Directory_ that will be used to build and deploy your site. + + > If you are setting up a monorepo, you will need to set the _Base Directory_ as the directory containing your Gatsby project. Gatsby Cloud supports `npm`, `yarn`, yarn workspaces and `lerna` with yarn workspaces or `npm`. + +6. Click the **Next** button. On _Tab 2_ you can choose from the automatic integration providers to connect one or more CMSs to provide data to your Gatsby Cloud preview instance. +7. By clicking **Connect** next to any integration option, you will be prompted to authenticate with that CMS platform and choose your data source within that CMS. + + > If you're looking for instructions on configuring Gatsby Cloud with a specific CMS, check out the [Gatsby Cloud Docs](https://www.gatsbyjs.com/docs/). + + > If your CMS provider isn't listed but supports webhooks, you can attempt to manually connect to it using the Gatsby Cloud POST endpoint. This will keep your Preview automatically updated when you make changes to your CMS. + +8. Once you have connected your desired integrations, click **Set up your site**. On _Tab 3_, you can configure any environment variables that you wish to set for your preview and build. +9. Click **Create site**. You will be brought to the dashboard page for your site and under the _Production_ tab you will see that your build has been triggered and is underway. + + > Once the build has finished, a URL will appear for you to view the live build of your site. + +10. Under the _Preview_ tab you can find your site's preview URL. + + > This preview URL will allow your team to make changes to your CMS and automatically view updates to your site in real time without having to rebuild. + +11. If a build fails, you can click **View Details** to view the warning, errors, and raw logs for the build. + +## Set up hosting for your site + +1. Navigate to your site in your Gatsby Cloud [Dashboard](https://www.gatsbyjs.com/dashboard/sites). Under the _Site Settings_ tab, navigate to the _Integrations_ > _Hosting_ subsection. +2. To setup an integration, click **Connect** next to the hosting provider of your choice. +3. Follow the prompts to authorize with your hosting provider. + + > The [Gatsby Cloud Docs](https://www.gatsbyjs.com/docs/) provide specific tutorials for each hosting provider. + +4. At this point your hosting integration should be setup. + + > You can verify this by returning to _Site Settings_ > _Integrations_ > _Hosting_ where your new integration should show as _Connected_ in green. + +5. Now when a build is triggered, your site will be deployed to your hosting target(s). + +## Additional Resources + +- [Gatsby Cloud Docs](https://www.gatsbyjs.com/docs/) +- [Announcing Gatsby Cloud](/blog/2019-11-14-announcing-gatsby-cloud/) diff --git a/docs/docs/gatsby-on-windows.md b/docs/docs/gatsby-on-windows.md index 5d4b28baa7dd4..ff6b2eab854b5 100644 --- a/docs/docs/gatsby-on-windows.md +++ b/docs/docs/gatsby-on-windows.md @@ -11,7 +11,7 @@ Tools). 
 The recommended way to setup your build environment on Windows is to install the
 [`windows-build-tools`](https://github.com/felixrieseberg/windows-build-tools)
-package by running `npm install windows-build-tools -g` on an admin PowerShell
+package by running `npm install --global windows-build-tools --vs2015` on an admin PowerShell
 console. Upon installing this package, it downloads and installs Visual C++ Build
 Tools 2015, provided free of charge by Microsoft. These tools are required to
 compile popular native modules. It will also install Python 2.7, configuring
diff --git a/docs/docs/glossary.md b/docs/docs/glossary.md
index f3979ab7781a5..d2dbb5c97a0ae 100644
--- a/docs/docs/glossary.md
+++ b/docs/docs/glossary.md
@@ -176,7 +176,7 @@ A hosting provider keeps a copy of your website or app and makes it accessible t
 
 A feature in use when you run `gatsby develop` that live updates your site on save of code in a text editor by automatically replacing modules, or chunks of code, in an open browser window.
 
-### Hydration
+### [Hydration](/docs/glossary/hydration/)
 
 Once a site has been [built](#build) by Gatsby and loaded in a web browser, [client-side](#client-side) JavaScript assets will download and turn the site into a full React application that can manipulate the [DOM](#dom). This process is often called re-hydration as it runs some of the same JavaScript code used to generate Gatsby pages, but this time with browser DOM APIs like `window` available.
 
@@ -220,9 +220,9 @@ A way of writing HTML content with plain text, using special characters to denot
 
 ## N
 
-### NPM
+### [npm](/docs/glossary/npm)
 
-[Node](#node) [Package](#package) Manager. Allows you to install and update other packages that your project depends on. [Gatsby](#gatsby) and [React](#react) are examples of your project's dependencies. See also: [Yarn](#yarn).
+[Node](#node) [package](#package) manager. Allows you to install and update other packages that your project depends on. [Gatsby](#gatsby) and [React](#react) are examples of your project's dependencies. See also: [Yarn](#yarn).
 
 ### Node
diff --git a/docs/docs/glossary/hydration.md b/docs/docs/glossary/hydration.md
new file mode 100644
index 0000000000000..b6d45d88f49c3
--- /dev/null
+++ b/docs/docs/glossary/hydration.md
@@ -0,0 +1,53 @@
+---
+title: Hydration
+disableTableOfContents: true
+---
+
+Learn what _hydration_ means, and how Gatsby makes use of React's hydration features to build blazing fast websites and applications.
+
+## What is hydration?
+
+_Hydration_ is the process of using client-side JavaScript to add application state and interactivity to server-rendered HTML. It's a feature of [React](/docs/glossary/react/), one of the underlying tools that make up the Gatsby framework. Gatsby uses hydration to transform the static HTML created at [build time](/docs/glossary/build/) into a React application.
+
+A typical React application relies on client-side rendering. Instead of parsing HTML to create the [DOM](/docs/glossary#dom), client-side rendering uses JavaScript to create it. A minimal HTML document serves as the application container, and only contains links to the JavaScript and CSS necessary to render the application.
+
+```html
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>ExampleApp</title>
+    <link rel="stylesheet" href="/styles.css" />
+  </head>
+  <body>
+    <div id="root"></div>
+    <script src="/bundle.js"></script>
+  </body>
+</html>
+```
+
+With client-side rendering, most actions trigger local DOM updates instead of network requests. Clicking a navigation link builds the requested page on the client instead of requesting it from the server. Because they make fewer network requests, applications rendered in the browser provide a blazing-fast user experience — after the initial download.
+
+That's the drawback to client-side rendering: none of your site's content is visible or interactive until the client downloads JavaScript and builds the DOM. However, not all clients can construct a DOM. For example, client-side rendering can prevent search engine and social media crawlers from consuming and indexing your site's URLs. Browser users, on the other hand, may see a blank page or loading image while your JavaScript bundle downloads and executes.
+
+[Server-side rendering](/docs/glossary/server-side-rendering/) makes HTML available to the client _before_ JavaScript loads. Your site visitors can see and read your content even if it is not fully interactive. Server rendering eliminates the blank page problem. Rendered HTML also makes it easier for search engine and social media crawlers to consume your site. Of course, server-side rendering also has a drawback: every URL request requires another round trip to the server.
+
+Hydration lets us take a hybrid approach.
+
+> **Note:** You'll sometimes see developers use _re-hydration_ instead of _hydration_. They're interchangeable.
+
+Gatsby's build process uses [Node.js](/docs/glossary/node/) and [`ReactDOMServer`](https://reactjs.org/docs/react-dom-server.html) to create two different versions of your site. Each URL is available both as a static HTML page and as a JavaScript component.
+
+When a visitor requests their first URL from your site, the response contains static HTML along with linked JavaScript, CSS, and images. React then takes over and _hydrates_ that HTML. React adds event listeners to the DOM created during HTML parsing, and turns your site into a full React application. Subsequent page requests are DOM updates managed by React.
+
+### Learn More
+
+- [Understanding React Hydration](https://www.gatsbyjs.org/docs/react-hydration/) from the Gatsby docs
+- [ReactDOM.hydrate()](https://reactjs.org/docs/react-dom.html#hydrate) from the React API Reference
+- [Rendering on the Web](https://developers.google.com/web/updates/2019/02/rendering-on-the-web) from Google
diff --git a/docs/docs/glossary/npm.md b/docs/docs/glossary/npm.md
new file mode 100644
index 0000000000000..f6caff679440f
--- /dev/null
+++ b/docs/docs/glossary/npm.md
@@ -0,0 +1,59 @@
+---
+title: npm or Node package manager
+disableTableOfContents: true
+---
+
+Learn what _npm_ is, how to use it, and how it fits into the Gatsby ecosystem.
+
+## What is npm?
+
+npm, or Node package manager, is the default package manager for the [Node.js](/docs/glossary/node) JavaScript runtime. It lets you install and update libraries and frameworks (dependencies) for Node-based projects, and interact with the npm Registry. You'll use npm to install and upgrade Gatsby and its plugins.
+
+npm is a [command line](/docs/glossary#command-line) tool. You'll need Terminal (Mac, Linux) or Command Prompt (Windows) in order to run its commands. To use one of npm's features, type `npm <command>`. For example, `npm help` displays a list of available features, including `install`, `uninstall`, `update`, and `search`.
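+
+For example, you can try a couple of harmless commands to confirm npm is working (shown as a quick sketch):
+
+```shell
+npm --version # prints the installed npm version
+npm help # lists the available npm commands
+```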
+
+npm is installed alongside Node during the default [installation process](/tutorial/part-zero/#install-nodejs-for-your-appropriate-operating-system). You don't need to take any additional steps to add it to your environment.
+
+### Using npm to install Gatsby
+
+You'll need to install Gatsby globally to use Gatsby CLI commands such as `gatsby new`. To do so, use `npm install` with the `--global` or `-g` flag.
+
+```shell
+npm install -g gatsby-cli
+```
+
+Once the installation completes, you can run `gatsby new my-project` to create a new Gatsby project.
+
+### Using npx to install Gatsby
+
+> **Note:** `npx` requires npm version 5.2 or later. If you've installed the latest versions of Node and npm, you should also have npx. Otherwise, you should upgrade Node and/or npm.
+
+You can also use [npx](https://www.npmjs.com/package/npx) to install Gatsby. npx ships with npm. It allows you to install a package and run a command in one step. For example, instead of running `npm install -g gatsby-cli` then `gatsby new my-project`, you could use the following command.
+
+```shell
+npx gatsby new my-project
+```
+
+This will download and install the latest version of Gatsby, then create a new Gatsby project in the `my-project` folder. Choosing this method will not make the Gatsby CLI globally available, however. If you install Gatsby using npx, you'll need to use `npx gatsby` or `npm run` to execute Gatsby commands, e.g.: `npx gatsby develop` or `npm run develop`.
+
+### Using npm to install Gatsby plugins
+
+Gatsby has a robust collection of [plugins](/plugins/) that add functionality or data sourcing to your Gatsby sites. Adding a plugin as a project dependency uses the same process as installing Gatsby itself. Use `npm install <plugin-name>`. To add the [gatsby-source-filesystem](/packages/gatsby-source-filesystem) plugin, for example, you'd use the following command.
+
+```shell
+npm install gatsby-source-filesystem
+```
+
+> **Note:** Use `npm install` to add plugins, even if you installed Gatsby using npx.
+
+> **Note:** You'll still need to update `gatsby-config.js` to add the plugin's functionality to your site.
+
+This will update the dependencies list of `package.json` and `package-lock.json`. Commit both files to your project's repository. Doing so makes it easy to keep your Gatsby project consistent across team members and computers. When another team member clones your repository, they can use `npm install` to install the dependencies included in `package-lock.json`.
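+
+To follow up on the note above about `gatsby-config.js`, here is a minimal sketch of registering `gatsby-source-filesystem` after installing it (the `name` and `path` values are only examples):
+
+```javascript:title=gatsby-config.js
+module.exports = {
+  plugins: [
+    {
+      resolve: `gatsby-source-filesystem`,
+      options: {
+        name: `data`,
+        // point this at wherever your files live
+        path: `${__dirname}/src/data/`,
+      },
+    },
+  ],
+}
+```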
+ +### Learn more about npm + +- [npm](https://www.npmjs.com/) official website +- [Node.js](https://nodejs.org/en/) official website +- [An introduction to the npm package manager](https://nodejs.dev/an-introduction-to-the-npm-package-manager) from Nodejs.dev +- [Set Up Your Development Environment](/tutorial/part-zero/) from the Gatsby docs diff --git a/docs/docs/images/cloud-lighthouse.png b/docs/docs/images/cloud-lighthouse.png new file mode 100644 index 0000000000000..b2840ed728bcd Binary files /dev/null and b/docs/docs/images/cloud-lighthouse.png differ diff --git a/docs/docs/mdx/programmatically-creating-pages.md b/docs/docs/mdx/programmatically-creating-pages.md index c84402cf24500..0393573b580e9 100644 --- a/docs/docs/mdx/programmatically-creating-pages.md +++ b/docs/docs/mdx/programmatically-creating-pages.md @@ -274,13 +274,17 @@ component should look like: ```jsx:title=src/components/posts-page-layout.js import React from "react" import { graphql } from "gatsby" +import { MDXProvider } from "@mdx-js/react" import { MDXRenderer } from "gatsby-plugin-mdx" +import { Link } from "gatsby" export default function PageTemplate({ data: { mdx } }) { return (

     <div>
       <h1>{mdx.frontmatter.title}</h1>
-      <MDXRenderer>{mdx.body}</MDXRenderer>
+      <MDXProvider components={{ Link }}>
+        <MDXRenderer>{mdx.body}</MDXRenderer>
+      </MDXProvider>
     </div>
   )
 }
diff --git a/docs/docs/recipes.md b/docs/docs/recipes.md
index 2a3ef48c2e7da..46e2ac3cae6d4 100644
--- a/docs/docs/recipes.md
+++ b/docs/docs/recipes.md
@@ -125,3 +125,4 @@ Showtime. Once you are happy with your site, you are ready to go live with it!
 - [Deploying to Netlify](/docs/recipes/deploying-your-site#deploying-to-netlify)
 - [Deploying to ZEIT Now](/docs/recipes/deploying-your-site#deploying-to-zeit-now)
 - [Deploying to Cloudflare Workers](/docs/recipes/deploying-your-site#deploying-to-cloudflare-workers)
+- [Setting up Google Analytics](/docs/recipes/deploying-your-site#setting-up-google-analytics)
diff --git a/docs/docs/recipes/deploying-your-site.md b/docs/docs/recipes/deploying-your-site.md
index 6b7e88aec739b..1427c82012069 100644
--- a/docs/docs/recipes/deploying-your-site.md
+++ b/docs/docs/recipes/deploying-your-site.md
@@ -130,3 +130,59 @@ Use [`wrangler`](https://developers.cloudflare.com/workers/tooling/wrangler/) to
 ### Additional resources
 
 - [Hosting on Cloudflare](/docs/deploying-to-cloudflare-workers)
+
+## Setting up Google Analytics
+
+Use `gatsby-plugin-google-analytics` to track site activity and provide insights into how users access your website.
+
+### Prerequisites
+
+- A [Gatsby site](/docs/quick-start) with a `gatsby-config.js` file and an `index.js` page
+- The [Gatsby CLI](/docs/gatsby-cli) installed
+- A domain from your provider of choice, e.g. [AWS](https://aws.amazon.com/getting-started/tutorials/get-a-domain/)
+
+### Verify the domain in search.google.com
+
+1. Navigate to the [Google search console](https://search.google.com/search-console/not-verified) to verify the domain by clicking on **Search Property** > **Add Property**. Type in your domain and press **Continue**.
+2. Add a **TXT** record to your DNS configuration. Follow the directions for your provider, or refer to the [Google documentation](https://support.google.com/a/answer/183895?hl=en).
+
+### Linking the domain to Google Analytics admin
+
+1. Log into [Google Analytics](https://analytics.google.com/analytics/).
+2. Click **Admin**.
+3. Select **Create Property** in the Property column.
+4. Choose **Web**.
+5. Fill in the details and click **Create**.
+
+### Getting your Google Analytics `Tracking ID`
+
+1. Sign in to your Google Analytics account.
+2. Click **Admin**.
+3. Select an account from the menu in the ACCOUNT column.
+4. Select a property from the menu in the PROPERTY column.
+5. Under Property, click **Tracking Info** > **Tracking Code**. Your Tracking ID is displayed at the top of the page.
+
+### Using the ID in the plugin
+
+1. Run `npm install gatsby-plugin-google-analytics` in your terminal.
+2. Add the following to your `gatsby-config.js` file.
+
+```javascript:title=gatsby-config.js
+module.exports = {
+  plugins: [
+    {
+      resolve: `gatsby-plugin-google-analytics`,
+      options: {
+        // replace "UA-XXXXXXXXX-X" with your own Tracking ID
+        trackingId: "UA-XXXXXXXXX-X",
+      },
+    },
+  ],
+}
+```
+
+3. Build and deploy your site to start seeing traffic in your [Google Analytics dashboard](https://analytics.google.com/analytics/web/).
+ +### Additional resources + +- [adding-analytics](https://www.gatsbyjs.org/docs/adding-analytics/) diff --git a/docs/docs/sourcing-from-graphcms.md b/docs/docs/sourcing-from-graphcms.md index 4ea87069f9a59..a994d9e251cc2 100644 --- a/docs/docs/sourcing-from-graphcms.md +++ b/docs/docs/sourcing-from-graphcms.md @@ -35,7 +35,7 @@ You can install this component with: ### Configure the plugin -The last step required before you can query your data is to configure the `gatsby-source-graphql` plugin. Open `gatsby-config.js` and add the following object to the plugins array. This example uses an open API from GraphCMS but you will most likely want to replace this with your own API and provide a fieldName that makes the most sense to your project. [Here's more information on working with GraphCMS APIs.](https://docs.graphcms.com/developers/api) +The last step required before you can query your data is to configure the `gatsby-source-graphql` plugin. Open `gatsby-config.js` and add the following object to the plugins array. This example uses an open API from GraphCMS but you will most likely want to replace this with your own API and provide a fieldName that makes the most sense to your project. [Here's more information on working with GraphCMS APIs.](https://docs.graphcms.com/docs/api) ```js { diff --git a/docs/docs/tailwind-css.md b/docs/docs/tailwind-css.md index e7b9ff5be2dfd..4e8cb0bfc8838 100644 --- a/docs/docs/tailwind-css.md +++ b/docs/docs/tailwind-css.md @@ -66,7 +66,7 @@ To learn more about how to use Tailwind in your CSS, visit the [Tailwind Documen ### Option #2: CSS-in-JS -These steps assume you have a CSS-in-JS library already installed, and the examples are based on Styled Components. +These steps assume you have a CSS-in-JS library already installed, and the examples are based on Emotion. 1. Install Tailwind Babel Macro @@ -108,7 +108,7 @@ npm install tailwind.macro 2. Use the Babel Macro (`tailwind.macro`) in your styled component ```javascript -import styled from "styled-components" +import styled from "@emotion/styled" import tw from "tailwind.macro" // All versions diff --git a/docs/docs/telemetry.md b/docs/docs/telemetry.md index 7bf87537eb415..ea9c2fc1c7125 100644 --- a/docs/docs/telemetry.md +++ b/docs/docs/telemetry.md @@ -32,13 +32,13 @@ We use these metrics to better understand the usage patterns. These metrics will Specifically, we collect the following information for _all_ telemetry events: -- Timestamp of the occurrence -- Command invoked (e.g. `build` or `develop`) -- Gatsby machine ID. This is generated with UUID and stored in global gatsby config at ~/.config/gatsby/config.json. -- Unique session ID. This is generated on each run with UUID. -- One-way hash of the current working directory or a hash of the git remote -- General OS level information (operating system, version, CPU architecture, and whether the command is run inside a CI) -- Current Gatsby version +- Timestamp of the occurrence. +- Command invoked (e.g. `build` or `develop`). +- Gatsby machine ID: This is generated with UUID and stored in global gatsby config at `~/.config/gatsby/config.json`. +- Unique session ID: This is generated on each run with UUID. +- One-way hash of the current working directory or a hash of the git remote. +- General OS level information (operating system, version, CPU architecture, and whether the command is run inside a CI). +- Current Gatsby version. The access to the raw data is highly controlled, and we cannot identify individual users from the dataset. 
It is anonymized and untraceable back to the user.
diff --git a/docs/docs/third-party-graphql.md b/docs/docs/third-party-graphql.md
index 723e7db95a24d..d9b90ed34ab4f 100644
--- a/docs/docs/third-party-graphql.md
+++ b/docs/docs/third-party-graphql.md
@@ -100,3 +100,4 @@ exports.createPages = async ({ actions, graphql }) => {
 - [Example with GraphCMS](https://github.com/freiksenet/gatsby-graphcms)
 - [Example with Hasura](https://github.com/hasura/graphql-engine/tree/master/community/sample-apps/gatsby-postgres-graphql)
 - [Example with AWS AppSync](https://github.com/aws-samples/aws-appsync-gatsby-sample)
+- [Example with Dgraph](https://github.com/dgraph-io/gatsby-dgraph-graphql)
diff --git a/docs/docs/using-cloudinary-image-service.md b/docs/docs/using-cloudinary-image-service.md
new file mode 100644
index 0000000000000..fab73d0bd0d6c
--- /dev/null
+++ b/docs/docs/using-cloudinary-image-service.md
@@ -0,0 +1,156 @@
+---
+title: Using Cloudinary image service for media optimization
+---
+
+Cloudinary is a cloud-based, end-to-end media management platform that helps site creators serve optimized media files (images and videos) to their audiences. It also provides a wide range of optional transformations that can be applied to these media assets.
+
+In this guide you will take a look at the [gatsby-source-cloudinary](/packages/gatsby-source-cloudinary/) and [gatsby-transformer-cloudinary](/packages/gatsby-transformer-cloudinary/) plugins, which you can use to improve the experience of handling images on Gatsby sites.
+
+Plugins are generally used to abstract functionality in Gatsby. In this case, the `gatsby-source-cloudinary` plugin is a [source plugin](/docs/creating-a-source-plugin/) that connects Cloudinary's media storage capabilities to your site.
+
+> Here's a [demo site that uses gatsby-source-cloudinary](https://gsc-sample.netlify.com) showcasing optimized images in a masonry grid, served from Cloudinary.
+
+## The problem with handling images on the web
+
+Dealing with images on the web has always been a challenge: unoptimized images can slow down your site, and the processes required to deliver the best media experience can take a lot of time to implement.
+
+## Solutions Cloudinary provides
+
+Cloudinary provides several solutions to this problem, namely:
+
+- Remote storage and delivery of images via CDN
+- A wider range of transformations than [gatsby-image](/docs/using-gatsby-image/)
+- [Digital Asset Management](https://cloudinary.com/documentation/digital_asset_management_overview) for enterprise assets
+
+## Gatsby-source-cloudinary
+
+This plugin fetches media assets from a specified folder in your Cloudinary account. It then transforms these images into Cloudinary file nodes, which can be queried with GraphQL in a Gatsby project.
+`gatsby-source-cloudinary` applies the [f_auto and q_auto](https://cloudinary.com/documentation/image_transformations) transformation parameters, which automatically optimize the format and quality of media assets, often reducing file size by over 70 percent.
+
+### Prerequisites
+
+Before using the `gatsby-source-cloudinary` plugin you should do the following:
+
+- Upload your images to a folder on Cloudinary. This folder can have any name of your choosing.
+- Obtain your API key and API secret from your Cloudinary dashboard.
+- Have the [dotenv](https://www.npmjs.com/package/dotenv) module installed for loading environment variables from a `.env` file.
+
+### Using gatsby-source-cloudinary
+
+Add the plugin to your project.
+
+1. Install `gatsby-source-cloudinary`:
+
+```shell
+npm install gatsby-source-cloudinary
+```
+
+2. In the root of your project, create an environment file called `.env` and add your Cloudinary credentials and their values.
+
+```
+CLOUDINARY_API_KEY=xxxxxxxxxxxxxx
+CLOUDINARY_API_SECRET=xxxxxxxxxxxxxxxxxxxx
+CLOUDINARY_CLOUD_NAME=xxxxx
+```
+
+3. Configure `gatsby-config.js`:
+
+```js:title=gatsby-config.js
+require('dotenv').config(); // highlight-line
+module.exports = {
+  ...
+  plugins: [
+    ...
+    {
+      resolve: `gatsby-source-cloudinary`,
+      options: {
+        cloudName: process.env.CLOUDINARY_CLOUD_NAME,
+        apiKey: process.env.CLOUDINARY_API_KEY,
+        apiSecret: process.env.CLOUDINARY_API_SECRET,
+        resourceType: `image`,
+        prefix: `gatsby-source-cloudinary/`
+      }
+    }
+  ]
+}
+```
+
+Note that `gatsby-source-cloudinary` takes the following options:
+
+- **`cloudName`**, **`apiKey`**, and **`apiSecret`**: These are credentials from your Cloudinary console, stored as three separate environment variables for security.
+- **`resourceType`**: This is the resource type of the media assets, either an image or a video.
+- **`prefix`**: This is the folder (in your Cloudinary account) in which the files reside. In the example above, the folder is called `gatsby-source-cloudinary`. Assign a name of your choice. Other optional settings are `type`, `tags`, and `maxResult`.
+
+Here's a [link to the README](https://github.com/Chuloo/gatsby-source-cloudinary#query-parameters) for more information.
+
+## Gatsby-transformer-cloudinary
+
+After sourcing media files from Cloudinary, you will be able to leverage Cloudinary's media transformation capabilities. To do so, use `gatsby-transformer-cloudinary`, a [transformer plugin](/docs/creating-a-transformer-plugin/) that changes image formats, styles, and dimensions. It also optimizes images for minimal file size and high visual quality, improving the user experience while keeping bandwidth usage low.
+
+Here's a [demo site that uses the gatsby-transformer-cloudinary plugin](https://gatsby-transformer-cloudinary.netlify.com/fluid/).
+
+### Prerequisites
+
+Before using the `gatsby-transformer-cloudinary` plugin you should do the following:
+
+- Upload your images to a folder on Cloudinary. This folder can have any name of your choosing.
+- Have the `gatsby-source-cloudinary` plugin installed and configured.
+- Obtain your API key and API secret from your Cloudinary dashboard.
+- Have the [dotenv](https://www.npmjs.com/package/dotenv) module installed for loading environment variables from a `.env` file.
+
+### Using gatsby-transformer-cloudinary
+
+1. Install `gatsby-transformer-cloudinary` and `gatsby-source-filesystem`, which creates the File nodes that the Cloudinary transformer plugin works on.
+
+```shell
+npm install gatsby-transformer-cloudinary gatsby-source-filesystem
+```
+
+2. In the root of your project, create an environment file called `.env` and add your Cloudinary credentials and their values.
+
+```
+CLOUDINARY_API_KEY=xxxxxxxxxxxxxx
+CLOUDINARY_API_SECRET=xxxxxxxxxxxxxxxxxxxx
+CLOUDINARY_CLOUD_NAME=xxxxx
+```
+
+3. Configure `gatsby-config.js`:
+
+```js:title=gatsby-config.js
+require('dotenv').config({
+  path: `.env.${process.env.NODE_ENV}`,
+});
+
+module.exports = {
+  plugins: [
+    {
+      resolve: `gatsby-source-filesystem`,
+      options: {
+        name: `images`,
+        path: `${__dirname}/src/images`,
+      },
+    },
+    {
+      resolve: 'gatsby-transformer-cloudinary',
+      options: {
+        cloudName: process.env.CLOUDINARY_CLOUD_NAME,
+        apiKey: process.env.CLOUDINARY_API_KEY,
+        apiSecret: process.env.CLOUDINARY_API_SECRET,
+
+        // This folder will be created if it doesn’t exist.
+        uploadFolder: 'gatsby-cloudinary',
+      },
+    },
+  ],
+};
+```
+
+> In `gatsby-config.js`, responsive breakpoints can be created for each image; use the `fluidMaxWidth` and `fluidMinWidth` options to set them. Take a look at the [plugin documentation](https://www.npmjs.com/package/gatsby-transformer-cloudinary#api) for more information on how these parameters can be set.
+
+## Additional resources
+
+- [Faster Sites with Optimized Media Assets by William Imoh](/blog/2020-01-12-faster-sites-with-optimized-media-assets/)
+- [Gatsby Transformer Cloudinary](https://www.npmjs.com/package/gatsby-transformer-cloudinary)
+- [Gatsby Source Cloudinary](/packages/gatsby-source-cloudinary/)
+- [Aspect ratio parameter](https://cloudinary.com/documentation/image_transformation_reference#aspect_ratio_parameter)
diff --git a/docs/docs/using-gatsby-image.md b/docs/docs/using-gatsby-image.md
index 5e45a5510462d..17b18536a6e10 100644
--- a/docs/docs/using-gatsby-image.md
+++ b/docs/docs/using-gatsby-image.md
@@ -29,7 +29,7 @@ But creating optimized images for websites has long been a thorny problem. Ideal
 - Use the “blur-up” technique or a “traced placeholder” SVG to show a preview of the image while it loads
 - Hold the image position so your page doesn’t jump while the images load
 
-Doing this consistently across a site feels like Sisyphean labor. You manually optimize your images and then… several images are swapped in at the last minute or a design-tweak shaves 100px of width off your images.
+Doing this consistently across a site feels like a task that can never be completed. You manually optimize your images and then… several images are swapped in at the last minute or a design-tweak shaves 100px of width off your images.
 
 Most solutions involve a lot of manual labor and bookkeeping to ensure every image is optimized.
 
@@ -83,30 +83,31 @@ module.exports = {
 ```jsx:title=src/pages/my-dogs.js
 import React from "react"
-import { useStaticQuery, graphql } from "gatsby" // highlight-line
+import { graphql } from "gatsby" // highlight-line
 import Layout from "../components/layout"
 
-export default () => {
-  // highlight-start
-  const data = useStaticQuery(graphql`
-    query MyQuery {
-      file(relativePath: { eq: "images/corgi.jpg" }) {
-        childImageSharp {
-          # Specify the image processing specifications right in the query.
-          fluid {
-            ...GatsbyImageSharpFluid
-          }
-        }
-      }
-    }
-  `)
-  // highlight-end
+export default ({ data }) => {
   return (

I love my corgi!

) } + +// highlight-start +export const query = graphql` + query MyQuery { + file(relativePath: { eq: "images/corgi.jpg" }) { + childImageSharp { + # Specify the image processing specifications right in the query. + fluid { + ...GatsbyImageSharpFluid + } + } + } + } +` +// highlight-end ``` { ```jsx:title=src/pages/my-dogs.js import React from "react" -import { useStaticQuery, graphql } from "gatsby" +import { graphql } from "gatsby" import Layout from "../components/layout" import Img from "gatsby-image" // highlight-line -export default () => { - const data = useStaticQuery(graphql` - query MyQuery { - file(relativePath: { eq: "images/corgi.jpg" }) { - childImageSharp { - # Specify the image processing specifications right in the query. - fluid { - ...GatsbyImageSharpFluid - } - } - } - } - `) +export default ({ data }) => { return (

I love my corgi!

@@ -147,6 +136,19 @@ export default () => {
) } + +export const query = graphql` + query MyQuery { + file(relativePath: { eq: "images/corgi.jpg" }) { + childImageSharp { + # Specify the image processing specifications right in the query. + fluid { + ...GatsbyImageSharpFluid + } + } + } + } +` ``` Note: This example configuration assumes your images and Markdown pages are sourced from the same directory. Check out the section on [configuring for different directories](#configuring-for-images-and-posts-in-different-directories) for additional help. @@ -193,6 +197,7 @@ The `gatsby-plugin-mdx` plugin will be used in the example below. Put the `gatsb module.exports = { plugins: [ `gatsby-plugin-sharp`, + `gatsby-remark-images`, { resolve: `gatsby-plugin-mdx`, options: { diff --git a/docs/sites.yml b/docs/sites.yml index e2a026814dd88..a609893e108b6 100644 --- a/docs/sites.yml +++ b/docs/sites.yml @@ -9978,6 +9978,32 @@ built_by: Built by Rebels Ltd. built_by_url: https://builtbyrebels.com/ featured: false +- title: OCIUS + url: https://www.ocius.com.au/ + main_url: https://www.ocius.com.au/ + source_url: https://github.com/ocius/website + description: > + Ocius Technology Ltd (formerly Solar Sailor Holdings Ltd) is an Australian public unlisted company with Research and Development facilities at the University of NSW. + categories: + - Business + - Technology + - Science + built_by: Sergey Monin + built_by_url: https://build-in-saratov.com/ +- title: Kosmos & Kaos + main_url: https://www.kosmosogkaos.is/ + url: https://www.kosmosogkaos.is/ + description: > + A carefully designed user experience is good business. + categories: + - Design + - Consulting + - Agency + - Web Development + - JavaScript + built_by: Kosmos & Kaos + built_by_url: https://www.kosmosogkaos.is/ + featured: false - title: Design Portfolio of Richard Bruskowski main_url: https://bruskowski.design/ url: https://bruskowski.design/ @@ -10234,3 +10260,117 @@ - Food - Marketing built_by: Tyson Foods, Inc. +- title: TSUKUTTEMITA LAB + main_url: https://create.kayac.com/ + url: https://create.kayac.com/ + description: KAYAC private works + featured: false + categories: + - Portfolio + - Technology + - Entertainment + built_by: KAYAC inc. +- title: Brad Garropy + url: https://bradgarropy.com + main_url: https://bradgarropy.com + source_url: https://github.com/bradgarropy/bradgarropy.com + categories: + - Blog + - Education + - Entertainment + - JavaScript + - Open Source + - Portfolio + - Programming + - SEO + - Technology + - Web Development + built_by: Brad Garropy + built_by_url: https://twitter.com/bradgarropy +- title: mrkaluzny + main_url: https://mrkaluzny.com + url: https://mrkaluzny.com + description: > + Web designer and web developer specializing in providing services for SME sector. + featured: false + categories: + - Web Development + - Programming + - Business + - Portfolio + - Freelance + built_by: Wojciech Kaluzny +- title: The COVID Tracking Project + url: https://covidtracking.com/ + main_url: https://covidtracking.com/ + source_url: https://github.com/COVID19Tracking/website + description: > + The COVID Tracking Project collects and publishes the most complete testing data available for US states and territories. 
+ categories: + - Media + - Healthcare + built_by: The COVID Tracking Project Web Team + built_by_url: https://github.com/COVID19Tracking/website/graphs/contributors +- title: The Gauntlet Coverage of COVID-19 in Canada + url: https://covid19.thegauntlet.ca + main_url: https://covid19.thegauntlet.ca + description: > + Tracking The Spread of Coronavirus in Canada + categories: + - Media + - Education + built_by: Masoud Karimi + built_by_url: https://github.com/masoudkarimif +- title: Zestard Technologies + main_url: https://www.zestard.com + url: https://www.zestard.com + description: > + Zestard Technologies is an eCommerce Specialist company focusing on Magento & Shopify as a core expertise. + categories: + - Web Development + - WordPress + - Technology + - Agency + - E-commerce + built_by: Zestard Technologies + built_by_url: https://www.zestard.com +- title: Kostas Vrouvas + main_url: https://kosvrouvas.com + url: https://kosvrouvas.com + featured: false + categories: + - Blog + - Portfolio + built_by: Kostas Vrouvas +- title: Hanare Cafe in Toshijima, Toba, Japan + main_url: https://hanarecafe.com + url: https://hanarecafe.com + source_url: https://github.com/mnishiguchi/hanarecafe-gatsby + description: > + A website for a cafe/bakery located in Toshijima, a beautiful sightseeing spot just a 20-minutes ferry ride from downtown Toba, Japan. + categories: + - Food + - Travel + built_by: Masatoshi Nishiguchi + built_by_url: https://mnishiguchi.com + featured: false +- title: Jamify.me + description: > + We build websites & PWAs with JAMstack. Delivering faster, more secure web. + main_url: https://jamify.me + url: https://jamify.me + categories: + - Agency + - Web Development + featured: false +- title: "Due to COVID-19: Documenting the Signs of the Pandemic" + url: https://duetocovid19.com + main_url: https://duetocovid19.com + description: > + A project to document all the signs that have gone up on the storefronts of our cities in response to the coronavirus pandemic. + categories: + - Photography + - Community + built_by: Andrew Louis + built_by_url: https://hyfen.net + featured: false diff --git a/docs/starters.yml b/docs/starters.yml index d0ea35108e73a..6706871f23d2e 100644 --- a/docs/starters.yml +++ b/docs/starters.yml @@ -1,3 +1,16 @@ +- url: https://22boxes-gatsby-uno.netlify.com/ + repo: https://github.com/iamtherealgd/gatsby-starter-22boxes-uno + description: A Gatsby starter for creating blogs and showcasing your work + tags: + - Blog + - Portfolio + - Markdown + - SEO + features: + - Work and About pages + - Work page with blog type content management + - Personal webiste to create content and put your portfolio items + - Landing pages for your work items, not just links - url: https://gatsby-wordpress-libre.netlify.com/ repo: https://github.com/armada-inc/gatsby-wordpress-libre-starter description: A Gatsby starter for creating blogs from headless WordPress CMS. @@ -1869,19 +1882,24 @@ - Full Render Control with Portable Text - gatsby-image support - Content types for company info, pages, projects, people, and blog posts -- url: https://gatsby-starter-under-construction.netlify.com/ - repo: https://github.com/robinmetral/gatsby-starter-under-construction - description: Blazing fast "Under Construction" page with a blazing quick setup. +- url: https://gatsby-starter-oss.netlify.com/ + repo: https://github.com/robinmetral/gatsby-starter-oss + description: A Gatsby starter to showcase your open-source projects. 
tags: - - Onepage + - Portfolio + - Styling:Theme-UI - Styling:CSS-in-JS - - SEO + - Onepage - PWA + - SEO + - Testing + - Linting features: - - Configure everything in gatsby-config.js - - Creative CSS3 background patterns by Lea Verou - - Built-in Google Fonts support - - Social icons with react-social-icons + - 🐙🐈 Pull your pinned repos from GitHub + - 👩‍🎤 Style with Emotion + - ✨ Themeable with Theme UI + - 🚀 Powered by gatsby-theme-oss + - 💯 100/100 Lighthouse scores - url: https://gatsby-starter-docz.netlify.com/ repo: https://github.com/RobinCsl/gatsby-starter-docz description: Simple starter where building your own documentation with Docz is possible @@ -4411,7 +4429,7 @@ - HTML5UP - Styling:SCSS features: - - Kentico Kontent Caas plafrorm as the data source + - Kentico Kontent CaaS platform as the data source - Landing page divided by section. - Support for code syntax highlighting - Includes plugins for easy, beautiful typography @@ -5282,12 +5300,13 @@ - Language:TypeScript - Linting - Netlify + - Testing features: - - ✔️ Gatsby - - ✔️ TypeScript - - ✔️ Prettier - - ✔️ ESLint - - ✔️ Deploy to Netlify through GitHub Actions + - TypeScript + - ESLint for JS linting + - Prettier code formatting + - Jest for testing + - Deploy to Netlify through GitHub Actions - url: https://answer.netlify.com/ repo: https://github.com/passwd10/gatsby-starter-answer description: A simple Gatsby blog to show your Future Action on top of the page @@ -5850,3 +5869,77 @@ - Automatic Linting on Commit using husky and pretty-quick - Custom server to test Production Builds on your local network via zeit/serve - Extensive Readme in the repo +- url: https://gatsby-ts-tw-styled-eslint.netlify.com + repo: https://github.com/Miloshinjo/gatsby-ts-tw-styled-eslint-starter + description: Gatsby starter with Typescript, TailwindCSS, @emotion/styled and eslint. + tags: + - Linting + - Styling:CSS-in-JS + - Styling:Tailwind + - Language:TypeScript + features: + - Typescript support + - CSS-in-JS with @emotion/styled (like styled components) + - TailwindCSS (1.2) support + - eslint with airbnb settings +- url: https://mik3y.github.io/gatsby-starter-basic-bootstrap/ + repo: https://github.com/mik3y/gatsby-starter-basic-bootstrap + description: A barebones starter featuring react-bootstrap and deliberately little else + tags: + - Styling:Bootstrap + - Styling:SCSS + features: + - Uses react-bootstrap, sass, and little else + - Skeleton starter, based on gatsby-starter-default + - Optional easy integration of themes from Bootswatch.com +- url: https://gatsby-starter-songc.netlify.com/ + repo: https://github.com/FFM-TEAM/gatsby-starter-song + description: A Gatsby starter for blog style with fresh UI. + tags: + - Blog + - Netlify + - SEO + - Language:TypeScript + - Styling:CSS-in-JS + features: + - Emoji (emojione) + - Code syntax highlighting (atom-one-light Style) + - Mobile friendly and fully responsive + - Comment feature ( utterances) + - Post side PostTOC + - Simple fresh design like Medium + - Readability +- url: https://gatsby-starter-kontent-lumen.netlify.com/ + repo: https://github.com/Kentico/gatsby-starter-kontent-lumen + description: A minimal, lightweight, and mobile-first starter for creating blogs uses Gatsby and Kentico Kontent CMS. Inspired by Lumen. + tags: + - SEO + - CMS:Headless + - CMS:Kontent + - Netlify + - Styling:SCSS + - Blog + features: + - Kentico Kontent CaaS platform as the data source. + - Mobile-First approach in development. + - Archive organized by tags and categories. 
+ - Automatic Sitemap generation. + - Lost Grid. + - Beautiful typography inspired by matejlatin/Gutenberg. + - Stylesheet built using Sass and BEM-Style naming. + - Syntax highlighting in code blocks. + - Google Analytics support. +- url: https://rolwinreevan.com + repo: https://github.com/rolwin100/rolwinreevan_gatsby_blog + description: This starter consists of ant design system you can use it for your personal blog. I have give a lot of time in developing this starter because I found that there were not much starters with a very good design. Please give a star to this project if you have like it to encourage me 😄. Thank you. + tags: + - Blog + - Portfolio + - Markdown + - SEO + - PWA + features: + - Blog designed using Markdown. + - Beautifully designed landing page. + - First project in the starters list to use ant design. + - Supports SSR and is also a PWA. diff --git a/docs/tutorial/seo-and-social-sharing-cards-tutorial/index.md b/docs/tutorial/seo-and-social-sharing-cards-tutorial/index.md index 607f026a74588..6a80451ab5f90 100644 --- a/docs/tutorial/seo-and-social-sharing-cards-tutorial/index.md +++ b/docs/tutorial/seo-and-social-sharing-cards-tutorial/index.md @@ -117,7 +117,7 @@ function SEO({ description, lang, meta }) { // highlight-start { name: "keywords", - content: data.site.siteMetadata.keywords.join(","), + content: site.siteMetadata.keywords.join(","), }, // highlight-end ]} @@ -176,7 +176,7 @@ function SEO({ description, lang, meta, image: metaImage, title }) { // highlight-start const image = metaImage && metaImage.src - ? `${data.site.siteMetadata.siteUrl}${metaImage.src}` + ? `${site.siteMetadata.siteUrl}${metaImage.src}` : null // highlight-end @@ -194,7 +194,7 @@ function SEO({ description, lang, meta, image: metaImage, title }) { }, { name: "keywords", - content: data.site.siteMetadata.keywords.join(","), + content: site.siteMetadata.keywords.join(","), }, { property: `og:title`, @@ -318,12 +318,10 @@ function SEO({ description, lang, meta, image: metaImage, title, pathname }) { const metaDescription = description || site.siteMetadata.description const image = metaImage && metaImage.src - ? `${data.site.siteMetadata.siteUrl}${metaImage.src}` + ? `${site.siteMetadata.siteUrl}${metaImage.src}` : null // highlight-start - const canonical = pathname - ? `${data.site.siteMetadata.siteUrl}${pathname}` - : null + const canonical = pathname ? `${site.siteMetadata.siteUrl}${pathname}` : null // highlight-end return ( @@ -352,7 +350,7 @@ function SEO({ description, lang, meta, image: metaImage, title, pathname }) { }, { name: "keywords", - content: data.site.siteMetadata.keywords.join(","), + content: site.siteMetadata.keywords.join(","), }, { property: `og:title`, diff --git a/examples/creating-source-plugins/README.md b/examples/creating-source-plugins/README.md new file mode 100644 index 0000000000000..fb1face66c326 --- /dev/null +++ b/examples/creating-source-plugins/README.md @@ -0,0 +1,83 @@ +# Creating First Class Gatsby Source Plugins + +Create Gatsby plugins that leverage Gatsby's most impactful native features like remote image optimization, caching, customized GraphQL schemas and node relationships, and more. + +This monorepo serves as an example of a site using a first class source plugin to pull in data from a Node.js API. It is meant to show the 3 pieces that work together when building a source plugin: the API, the site, and the source plugin. 
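+
+To make the relationship between the pieces concrete, here is roughly how the example site loads the local plugin in its `gatsby-config.js` (the full file lives in `example-site/gatsby-config.js`; the option values shown are just the sample values used in this example):
+
+```js
+// example-site/gatsby-config.js (abridged)
+module.exports = {
+  plugins: [
+    {
+      // resolves to the sibling `source-plugin` workspace package
+      resolve: `source-plugin`,
+      options: {
+        spaceId: "123",
+        preview: true,
+        cacheResponse: false,
+      },
+    },
+    // required to generate optimized images from the remote files
+    `gatsby-plugin-sharp`,
+    `gatsby-transformer-sharp`,
+  ],
+}
+```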
+ +## Setup + +This monorepo uses yarn workspaces to manage the 3 indivdual projects: + +- api: a Node.js API with in-memory data, and a Post and Author type, as well as support for subscriptions when Posts are mutated +- example-site: a barebones Gatsby site that implements the source plugin +- source-plugin: a plugin that uses several Gatsby APIs to source data from the API, create responsive/optimized images from remote locations, and link the nodes in the example site + +To install dependencies for all projects run the install command in the root of the yarn workspace (which requires yarn to be installed): + +``` +yarn install +``` + +_Note: if you aren't using yarn, you can navigate into each of the 3 folders and run `npm install` instead_ + +Then you can run the api or example projects in separate terminal windows with the commands below. + +For the API which runs at `localhost:4000`, use this command: + +``` +yarn workspace api start +``` + +And to run the example site with `gatsby develop` at `localhost:8000`, use this command: + +``` +yarn workspace example-site develop +``` + +Running the example site also runs the plugin because it is included in the site's config. You'll see output in the console for different functionality and then can open up the browser to `localhost:8000` to see the site. + +## Developing and Experimenting + +You can open up `localhost:4000` with the API running, which will load a GraphQL Playground, which is a GraphQL IDE (like GraphiQL, that Gatsby runs at `localhost:8000/___graphql`) for running queries and mutations on the data from the API. + +You can test a query like this to see data returned: + +```graphql +query { + posts { + id + slug + } +} +``` + +This query will return the IDs for all posts in the API. You can copy one of these IDs and provide it as an argument to a mutation to update information about that post. + +You can run 3 different mutations from the GraphQL Playground (at `localhost:4000`): `createPost`, `updatePost`, and `deletePost`. These methods would mimic CRUD operations happening on the API of the data source like a headless CMS. An example `updatePost` mutation is outlined below. + +When you run a mutation on a post, a subscription event is published, which lets the plugin know it should respond and update nodes. The following mutation can be copied into the left side of the GraphQL playground so long as you replace "post-id" with a value returned for an ID from a query (like the one above). + +```graphql +mutation { + updatePost(id: "post-id", description: "Some data!") { + id + slug + description + } +} +``` + +The website's homepage will update with any changes while the source plugin is subscribed to changes, which is when the `preview: true` is provided in the example site's `gatsby-config`. + +You can also optionally listen for subscription events with this query in the playground which will display data when a mutation is run: + +```graphql +subscription { + posts { + id + description + } +} +``` + +A similar subscription is registered when the plugin is run, so you can also see subscription events logged when the plugin is running. diff --git a/examples/creating-source-plugins/api/README.md b/examples/creating-source-plugins/api/README.md new file mode 100644 index 0000000000000..c9fa5261a5770 --- /dev/null +++ b/examples/creating-source-plugins/api/README.md @@ -0,0 +1,3 @@ +# Example API + +A small GraphQL server with in-memory data, powered by [graphql-yoga](https://github.com/graphcool/graphql-yoga) 🧘. 
See the root of the monorepo for details about running this API alongisde the `example-site` and `source-plugin`. diff --git a/examples/creating-source-plugins/api/package.json b/examples/creating-source-plugins/api/package.json new file mode 100644 index 0000000000000..48e9f8626d1e9 --- /dev/null +++ b/examples/creating-source-plugins/api/package.json @@ -0,0 +1,43 @@ +{ + "name": "api", + "description": "A simple GraphQL server example with in-memory data", + "version": "1.0.0", + "license": "MIT", + "homepage": "https://general-repair.glitch.me", + "author": { + "name": "Risan Bagja Pradana", + "email": "risanbagja@gmail.com", + "url": "https://risan.io" + }, + "main": "src/index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/risan/simple-graphql-server-example.git" + }, + "bugs": { + "url": "https://github.com/risan/simple-graphql-server-example/issues" + }, + "keywords": [ + "graphql", + "graphql-server", + "graphql-yoga" + ], + "scripts": { + "start": "node src/index.js", + "lint": "eslint *.js src", + "lint-fix": "eslint *.js src --fix" + }, + "dependencies": { + "dotenv": "^5.0.1", + "graphql-yoga": "^1.8.2", + "uniqid": "^4.1.1" + }, + "devDependencies": { + "eslint": "^4.19.1", + "eslint-config-airbnb-base": "^12.1.0", + "eslint-config-prettier": "^2.9.0", + "eslint-plugin-import": "^2.10.0", + "eslint-plugin-prettier": "^2.6.0", + "prettier": "^1.11.1" + } +} diff --git a/examples/creating-source-plugins/api/src/index.js b/examples/creating-source-plugins/api/src/index.js new file mode 100644 index 0000000000000..70b3e159cfa73 --- /dev/null +++ b/examples/creating-source-plugins/api/src/index.js @@ -0,0 +1,138 @@ +require("dotenv").config() +const { GraphQLServer, PubSub } = require("graphql-yoga") +const uniqid = require("uniqid") + +const CREATED = "created" +const UPDATED = "updated" +const DELETED = "deleted" + +const authors = [ + { + id: 1, + name: "Jay Gatsby", + }, + { + id: 2, + name: "Daisy Buchanan", + }, +] + +const posts = [ + { + id: uniqid(), + slug: "hello-world", + description: "Our first post on our site.", + imgUrl: "https://images.unsplash.com/photo-1534432586043-ead5b99229fb", + imgAlt: "Pug in a sweater", + authorId: 1, + }, + { + id: uniqid(), + slug: "company-vision", + description: "Our vision for a welcoming company.", + imgUrl: "https://images.unsplash.com/photo-1530041539828-114de669390e", + imgAlt: "Pug in a rainjacket", + authorId: 1, + }, + { + id: uniqid(), + slug: "redesigning-our-logo", + description: "What went into the new logo.", + imgUrl: "https://images.unsplash.com/photo-1541364983171-a8ba01e95cfc", + imgAlt: "Pug in glasses", + authorId: 2, + }, +] + +const resolvers = { + Query: { + info: () => "A simple GraphQL server example with in-memory data.", + posts: () => posts, + authors: () => authors, + }, + + Mutation: { + createPost: (root, { slug, description }) => { + const post = { + id: uniqid(), + slug, + description, + imgUrl: "https://images.unsplash.com/photo-1534432586043-ead5b99229fb", + imgAlt: "pug in a sweater", + authorId: 1, + } + + posts.push(post) + pubsub.publish(CREATED, { posts: [{ status: CREATED, ...post }] }) + + return post + }, + + updatePost: (root, { id, description }) => { + const postIdx = posts.findIndex(p => id === p.id) + + if (postIdx === null) { + return null + } + + posts[postIdx] = { ...posts[postIdx], description } + pubsub.publish(UPDATED, { + posts: [{ status: UPDATED, ...posts[postIdx] }], + }) + + return posts[postIdx] + }, + + deletePost: (root, { id }) => { + const postIdx 
= posts.findIndex(p => id === p.id) + + if (postIdx === null) { + return null + } + + const post = posts[postIdx] + pubsub.publish(DELETED, { + posts: [{ status: DELETED, ...posts[postIdx] }], + }) + + posts.splice(postIdx, 1) + + return post + }, + }, + + Post: { + id: root => root.id, + slug: root => root.slug, + description: root => root.description, + author: root => authors.find(author => author.id === root.authorId), + }, + + Author: { + id: root => root.id, + name: root => root.name, + }, + + Subscription: { + posts: { + subscribe: (parent, args, { pubsub }) => { + return pubsub.asyncIterator([CREATED, UPDATED, DELETED]) + }, + }, + }, +} + +const pubsub = new PubSub() +const server = new GraphQLServer({ + typeDefs: "./src/schema.graphql", + resolvers, + context: { pubsub }, +}) + +server.start( + { + port: + (process.env.PORT ? parseInt(process.env.PORT, 10) : undefined) || 4000, + }, + ({ port }) => console.log(`🏃🏻‍ Server is running on port ${port}.`) +) diff --git a/examples/creating-source-plugins/api/src/schema.graphql b/examples/creating-source-plugins/api/src/schema.graphql new file mode 100644 index 0000000000000..2dd1f9c5a67c6 --- /dev/null +++ b/examples/creating-source-plugins/api/src/schema.graphql @@ -0,0 +1,30 @@ +type Query { + info: String! + posts: [Post!]! + authors: [Author!]! +} + +type Mutation { + createPost(slug: String!, description: String!): Post! + updatePost(id: ID!, description: String!): Post + deletePost(id: ID!): Post +} + +type Post { + id: ID! + slug: String! + description: String! + imgUrl: String! + imgAlt: String! + author: Author! + status: String +} + +type Author { + id: ID! + name: String! +} + +type Subscription { + posts: [Post!]! +} diff --git a/examples/creating-source-plugins/example-site/README.md b/examples/creating-source-plugins/example-site/README.md new file mode 100644 index 0000000000000..5c03365ce2895 --- /dev/null +++ b/examples/creating-source-plugins/example-site/README.md @@ -0,0 +1,3 @@ +# Example Site + +See the root of the monorepo for details about running this site with the example `source-plugin` installed inside it. diff --git a/examples/creating-source-plugins/example-site/gatsby-config.js b/examples/creating-source-plugins/example-site/gatsby-config.js new file mode 100644 index 0000000000000..1d16c1582500d --- /dev/null +++ b/examples/creating-source-plugins/example-site/gatsby-config.js @@ -0,0 +1,22 @@ +/** + * Configure your Gatsby site with this file. + * + * See: https://www.gatsbyjs.org/docs/gatsby-config/ + */ + +module.exports = { + plugins: [ + // loads the source-plugin + { + resolve: `source-plugin`, + options: { + spaceId: "123", + preview: true, + cacheResponse: false, + }, + }, + // required to generate optimized images + `gatsby-plugin-sharp`, + `gatsby-transformer-sharp`, + ], +} diff --git a/examples/creating-source-plugins/example-site/package.json b/examples/creating-source-plugins/example-site/package.json new file mode 100644 index 0000000000000..51dc41c4900d2 --- /dev/null +++ b/examples/creating-source-plugins/example-site/package.json @@ -0,0 +1,34 @@ +{ + "name": "example-site", + "private": true, + "description": "A simplified bare-bones starter for Gatsby", + "version": "0.1.0", + "license": "MIT", + "scripts": { + "build": "gatsby build", + "develop": "gatsby develop", + "format": "prettier --write \"**/*.{js,jsx,json,md}\"", + "start": "npm run develop", + "serve": "gatsby serve", + "clean": "gatsby clean", + "test": "echo \"Write tests! 
-> https://gatsby.dev/unit-testing\" && exit 1" + }, + "dependencies": { + "gatsby": "^2.19.45", + "gatsby-image": "^2.3.1", + "gatsby-plugin-sharp": "^2.5.3", + "gatsby-transformer-sharp": "^2.4.3", + "react": "^16.12.0", + "react-dom": "^16.12.0" + }, + "devDependencies": { + "prettier": "^1.19.1" + }, + "repository": { + "type": "git", + "url": "https://github.com/gatsbyjs/gatsby-starter-hello-world" + }, + "bugs": { + "url": "https://github.com/gatsbyjs/gatsby/issues" + } +} diff --git a/examples/creating-source-plugins/example-site/src/pages/index.js b/examples/creating-source-plugins/example-site/src/pages/index.js new file mode 100644 index 0000000000000..e8491f64ec941 --- /dev/null +++ b/examples/creating-source-plugins/example-site/src/pages/index.js @@ -0,0 +1,72 @@ +import React from "react" +import { graphql } from "gatsby" +import Img from "gatsby-image" + +export default ({ data }) => ( +
+

Posts

+
+ {data.allPost.nodes.map(post => ( +
+

{post.slug}

+ By: {post.author.name} +

{post.description}

+ {post.imgAlt} +
+ ))} +
+
+) + +export const query = graphql` + { + allPost { + nodes { + id + slug + description + imgAlt + author { + id + name + } + slug + remoteImage { + id + childImageSharp { + id + fluid { + ...GatsbyImageSharpFluid + } + } + } + } + } + } +` diff --git a/examples/creating-source-plugins/example-site/static/favicon.ico b/examples/creating-source-plugins/example-site/static/favicon.ico new file mode 100644 index 0000000000000..1a466ba8852cf Binary files /dev/null and b/examples/creating-source-plugins/example-site/static/favicon.ico differ diff --git a/examples/creating-source-plugins/package.json b/examples/creating-source-plugins/package.json new file mode 100644 index 0000000000000..34071a17c59ee --- /dev/null +++ b/examples/creating-source-plugins/package.json @@ -0,0 +1,14 @@ +{ + "name": "creating-source-plugins", + "version": "1.0.0", + "description": "Monorepo for examples, api, and plugins for creating first class source plugins", + "main": "index.js", + "author": "@gillkyle", + "license": "MIT", + "workspaces": [ + "api", + "example-site", + "source-plugin" + ], + "private": true +} diff --git a/examples/creating-source-plugins/source-plugin/README.md b/examples/creating-source-plugins/source-plugin/README.md new file mode 100644 index 0000000000000..1d88f045e1984 --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/README.md @@ -0,0 +1,3 @@ +# Example Source Plugin + +See the root of the monorepo for details about running this plugin inside of the `example-site` folder. It is installed in the example site and can be debugged and developed while running there. diff --git a/examples/creating-source-plugins/source-plugin/gatsby-node.js b/examples/creating-source-plugins/source-plugin/gatsby-node.js new file mode 100644 index 0000000000000..935297827e34f --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/gatsby-node.js @@ -0,0 +1,274 @@ +const { createRemoteFileNode } = require(`gatsby-source-filesystem`) +const WebSocket = require("ws") +const { ApolloClient } = require("apollo-client") +const { InMemoryCache } = require("apollo-cache-inmemory") +const { split } = require("apollo-link") +const { HttpLink } = require("apollo-link-http") +const { WebSocketLink } = require("apollo-link-ws") +const { getMainDefinition } = require("apollo-utilities") +const fetch = require("node-fetch") +const gql = require("graphql-tag") + +/** + * ============================================================================ + * Create a GraphQL client to subscribe to live data changes + * ============================================================================ + */ + +// Create an http link: +const httpLink = new HttpLink({ + uri: "http://localhost:4000", + fetch, +}) + +// Create a WebSocket link: +const wsLink = new WebSocketLink({ + uri: `ws://localhost:4000`, + options: { + reconnect: true, + }, + webSocketImpl: WebSocket, +}) + +// using the ability to split links, you can send data to each link/url +// depending on what kind of operation is being sent +const link = split( + // split based on operation type + ({ query }) => { + const definition = getMainDefinition(query) + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ) + }, + wsLink, + httpLink +) + +const client = new ApolloClient({ + link, + cache: new InMemoryCache(), +}) + +/** + * ============================================================================ + * Helper functions and constants + * 
============================================================================ + */ + +const POST_NODE_TYPE = `Post` +const AUTHOR_NODE_TYPE = `Author` + +// helper function for creating nodes +const createNodeFromData = (item, nodeType, helpers) => { + const nodeMetadata = { + id: helpers.createNodeId(`${nodeType}-${item.id}`), + parent: null, // this is used if nodes are derived from other nodes, a little different than a foreign key relationship, more fitting for a transformer plugin that is changing the node + children: [], + internal: { + type: nodeType, + content: JSON.stringify(item), + contentDigest: helpers.createContentDigest(item), + }, + } + + const node = Object.assign({}, item, nodeMetadata) + helpers.createNode(node) + return node +} + +/** + * ============================================================================ + * Verify plugin loads + * ============================================================================ + */ + +// should see message in console when running `gatsby develop` in example-site +exports.onPreInit = () => console.log("Loaded source-plugin") + +/** + * ============================================================================ + * Link nodes together with a customized GraphQL Schema + * ============================================================================ + */ + +exports.createSchemaCustomization = ({ actions }) => { + const { createTypes } = actions + createTypes(` + type Post implements Node { + id: ID! + slug: String! + description: String! + imgUrl: String! + imgAlt: String! + # create relationships between Post and File nodes for optimized images + remoteImage: File @link + # create relationships between Post and Author nodes + author: Author @link(from: "author.name" by: "name") + } + + type Author implements Node { + id: ID! + name: String! 
+ }`) +} + +/** + * ============================================================================ + * Source and cache nodes from the API + * ============================================================================ + */ + +exports.sourceNodes = async function sourceNodes( + { + actions, + cache, + createContentDigest, + createNodeId, + getNodesByType, + getNode, + }, + pluginOptions +) { + const { createNode, touchNode, deleteNode } = actions + const helpers = Object.assign({}, actions, { + createContentDigest, + createNodeId, + }) + + // you can access plugin options here if need be + console.log(`Space ID: ${pluginOptions.spaceId}`) + + // simple caching example, you can find in .cache/caches/source-plugin/some-diskstore + await cache.set(`hello`, `world`) + console.log(await cache.get(`hello`)) + + // touch nodes to ensure they aren't garbage collected + getNodesByType(POST_NODE_TYPE).forEach(node => touchNode({ nodeId: node.id })) + getNodesByType(AUTHOR_NODE_TYPE).forEach(node => + touchNode({ nodeId: node.id }) + ) + + // listen for updates using subscriptions from the API + if (pluginOptions.preview) { + console.log( + "Subscribing to updates on ws://localhost:4000 (plugin is in Preview mode)" + ) + const subscription = await client.subscribe({ + query: gql` + subscription { + posts { + id + slug + description + imgUrl + imgAlt + author { + id + name + } + status + } + } + `, + }) + subscription.subscribe(({ data }) => { + console.log(`Subscription received:`) + console.log(data.posts) + data.posts.forEach(post => { + const nodeId = createNodeId(`${POST_NODE_TYPE}-${post.id}`) + switch (post.status) { + case "deleted": + deleteNode({ + node: getNode(nodeId), + }) + break + case "created": + case "updated": + default: + // created and updated can be handled by the same code path + // the post's id is presumed to stay constant (or can be inferred) + createNodeFromData(post, POST_NODE_TYPE, helpers) + break + } + }) + }) + } + + // store the response from the API in the cache + const cacheKey = "your-source-data-key" + let sourceData = await cache.get(cacheKey) + + // fetch fresh data if nothiing is found in the cache or a plugin option says not to cache data + if (!sourceData || !pluginOptions.cacheResponse) { + console.log("Not using cache for source data, fetching fresh content") + const { data } = await client.query({ + query: gql` + query { + posts { + id + slug + description + imgUrl + imgAlt + author { + id + name + } + } + authors { + id + name + } + } + `, + }) + await cache.set(cacheKey, data) + sourceData = data + } + + // loop through data returned from the api and create Gatsby nodes for them + sourceData.posts.forEach(post => + createNodeFromData(post, POST_NODE_TYPE, helpers) + ) + sourceData.authors.forEach(author => + createNodeFromData(author, AUTHOR_NODE_TYPE, helpers) + ) + + return +} + +/** + * ============================================================================ + * Transform remote file nodes + * ============================================================================ + */ + +exports.onCreateNode = async ({ + actions: { createNode }, + getCache, + createNodeId, + node, +}) => { + // transfrom remote file nodes using Gatsby sharp plugins + // because onCreateNode is called for all nodes, verify that you are only running this code on nodes created by your plugin + if (node.internal.type === POST_NODE_TYPE) { + // create a FileNode in Gatsby that gatsby-transformer-sharp will create optimized images for + const fileNode = await createRemoteFileNode({ 
+ // the url of the remote image to generate a node for + url: node.imgUrl, + getCache, + createNode, + createNodeId, + parentNodeId: node.id, + }) + + if (fileNode) { + // used to add a field `remoteImage` to the Post node from the File node in the schemaCustomization API + node.remoteImage = fileNode.id + + // inference can link these without schemaCustomization like this, but creates a less sturdy schema + // node.remoteImage___NODE = fileNode.id + } + } +} diff --git a/examples/creating-source-plugins/source-plugin/index.js b/examples/creating-source-plugins/source-plugin/index.js new file mode 100644 index 0000000000000..172f1ae6a468c --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/index.js @@ -0,0 +1 @@ +// noop diff --git a/examples/creating-source-plugins/source-plugin/package.json b/examples/creating-source-plugins/source-plugin/package.json new file mode 100644 index 0000000000000..53449361a87c5 --- /dev/null +++ b/examples/creating-source-plugins/source-plugin/package.json @@ -0,0 +1,28 @@ +{ + "name": "source-plugin", + "version": "1.0.0", + "description": "A minimal boilerplate for the essential files Gatsby looks for in a plugin", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [ + "gatsby", + "gatsby-plugin" + ], + "author": "Kyle Gill ", + "license": "MIT", + "dependencies": { + "apollo-cache-inmemory": "^1.6.5", + "apollo-client": "^2.6.8", + "apollo-link": "^1.2.13", + "apollo-link-http": "^1.5.16", + "apollo-link-ws": "^1.0.19", + "apollo-utilities": "^1.3.3", + "gatsby-source-filesystem": "^2.2.2", + "graphql": "^15.0.0", + "graphql-tag": "^2.10.3", + "node-fetch": "^2.6.0", + "ws": "^7.2.3" + } +} diff --git a/package.json b/package.json index 70c0465e96888..3e0502d69e897 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,7 @@ "@types/babel__code-frame": "^7.0.1", "@types/bluebird": "^3.5.30", "@types/cache-manager": "^2.10.2", + "@types/common-tags": "^1.8.0", "@types/eslint": "^6.1.8", "@types/express": "^4.17.3", "@types/fast-levenshtein": "^0.0.1", @@ -18,6 +19,8 @@ "@types/lodash": "^4.14.149", "@types/node": "^12.12.30", "@types/node-fetch": "^2.5.5", + "@types/semver": "^7.1.0", + "@types/signal-exit": "^3.0.0", "@types/react": "^16.9.31", "@types/stack-trace": "^0.0.29", "@types/webpack": "^4.41.7", diff --git a/packages/gatsby-cli/CHANGELOG.md b/packages/gatsby-cli/CHANGELOG.md index a8392ff136ed7..db1f7c9163511 100644 --- a/packages/gatsby-cli/CHANGELOG.md +++ b/packages/gatsby-cli/CHANGELOG.md @@ -3,6 +3,18 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [2.11.7](https://github.com/gatsbyjs/gatsby/compare/gatsby-cli@2.11.6...gatsby-cli@2.11.7) (2020-04-10) + +### Bug Fixes + +- **gatsby-cli:** Address an issue that caused empty logs to print undefined ([#23000](https://github.com/gatsbyjs/gatsby/issues/23000)) ([be85f2e](https://github.com/gatsbyjs/gatsby/commit/be85f2e)) + +## [2.11.6](https://github.com/gatsbyjs/gatsby/compare/gatsby-cli@2.11.5...gatsby-cli@2.11.6) (2020-04-10) + +### Features + +- **gatsby-cli:** allow --recursive git url ([#22747](https://github.com/gatsbyjs/gatsby/issues/22747)) ([f4198e2](https://github.com/gatsbyjs/gatsby/commit/f4198e2)) + ## [2.11.5](https://github.com/gatsbyjs/gatsby/compare/gatsby-cli@2.11.4...gatsby-cli@2.11.5) (2020-04-03) **Note:** Version bump only for package gatsby-cli diff --git a/packages/gatsby-cli/package.json b/packages/gatsby-cli/package.json index 33c6763dd20f7..8148650e7e84e 100644 --- a/packages/gatsby-cli/package.json +++ b/packages/gatsby-cli/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-cli", "description": "Gatsby command-line interface for creating new sites and running Gatsby commands", - "version": "2.11.5", + "version": "2.11.7", "author": "Kyle Mathews ", "bin": { "gatsby": "lib/index.js" diff --git a/packages/gatsby-cli/src/index.ts b/packages/gatsby-cli/src/index.ts index f215a5f04a257..a08464ba5aed9 100755 --- a/packages/gatsby-cli/src/index.ts +++ b/packages/gatsby-cli/src/index.ts @@ -56,7 +56,7 @@ process.on(`unhandledRejection`, reason => { reason = new Error(util.format(reason)) } - report.panic(`UNHANDLED REJECTION`, reason) + report.panic(`UNHANDLED REJECTION`, reason as Error) }) process.on(`uncaughtException`, error => { diff --git a/packages/gatsby-cli/src/init-starter.ts b/packages/gatsby-cli/src/init-starter.ts index 8ed209a001734..5310cd00e0a0e 100644 --- a/packages/gatsby-cli/src/init-starter.ts +++ b/packages/gatsby-cli/src/init-starter.ts @@ -179,9 +179,14 @@ const clone = async (hostInfo: any, rootPath: string): Promise => { report.info(`Creating new site from git: ${url}`) - const args = [`clone`, ...branch, url, rootPath, `--depth=1`].filter(arg => - Boolean(arg) - ) + const args = [ + `clone`, + ...branch, + url, + rootPath, + `--recursive`, + `--depth=1`, + ].filter(arg => Boolean(arg)) await spawnWithArgs(`git`, args) diff --git a/packages/gatsby-cli/src/reporter/__tests__/index.js b/packages/gatsby-cli/src/reporter/__tests__/index.js index 68ac39f2947e2..b0423ebd15381 100644 --- a/packages/gatsby-cli/src/reporter/__tests__/index.js +++ b/packages/gatsby-cli/src/reporter/__tests__/index.js @@ -1,4 +1,4 @@ -const reporter = require(`../index.js`) +const reporter = require(`../`) const reporterActions = require(`../redux/actions`) // TODO: report.error now DOES return something. 
Get rid of this spying mocking stuff diff --git a/packages/gatsby-cli/src/reporter/__tests__/patch-console.ts b/packages/gatsby-cli/src/reporter/__tests__/patch-console.ts new file mode 100644 index 0000000000000..72d1cb43127e9 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/__tests__/patch-console.ts @@ -0,0 +1,60 @@ +import { patchConsole } from "../patch-console" +import { reporter as gatsbyReporter } from "../reporter" + +describe(`patchConsole`, () => { + const reporter = { + log: jest.fn(), + warn: jest.fn(), + info: jest.fn(), + } + + patchConsole((reporter as unknown) as typeof gatsbyReporter) + ;[`info`, `log`, `warn`].forEach(method => { + describe(method, () => { + beforeEach(reporter[method].mockReset) + + it(`handles an empty call`, () => { + console[method]() + + expect(reporter[method]).toBeCalledWith(``) + }) + + it(`handles multiple arguments`, () => { + console[method](`foo`, `bar`, `baz`) + + expect(reporter[method]).toBeCalledWith(`foo bar baz`) + }) + + it(`handles formatting`, () => { + console[method](`%s %d`, `bar`, true) + + expect(reporter[method]).toBeCalledWith(`bar 1`) + }) + + it(`handles normal values`, () => { + console[method](1) + console[method](0) + console[method](true) + console[method](false) + console[method]([1, true, false, {}]) + console[method]({ 1: 1, true: true, false: `false`, obj: {} }) + + expect(reporter[method].mock.calls[0][0]).toBe(`1`) + expect(reporter[method].mock.calls[1][0]).toBe(`0`) + expect(reporter[method].mock.calls[2][0]).toBe(`true`) + expect(reporter[method].mock.calls[3][0]).toBe(`false`) + expect(reporter[method].mock.calls[4][0]).toBe(`[ 1, true, false, {} ]`) + expect(reporter[method].mock.calls[5][0]).toBe( + `{ '1': 1, true: true, false: 'false', obj: {} }` + ) + }) + + it(`handles undefined variables`, () => { + let a + console[method](a) + + expect(reporter[method]).toBeCalledWith(``) + }) + }) + }) +}) diff --git a/packages/gatsby-cli/src/reporter/catch-exit-signals.ts b/packages/gatsby-cli/src/reporter/catch-exit-signals.ts new file mode 100644 index 0000000000000..67c54680f50e1 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/catch-exit-signals.ts @@ -0,0 +1,42 @@ +/* + * This module is used to catch if the user kills the gatsby process via cmd+c + * When this happens, there is some clean up logic we need to fire offf + */ +import signalExit from "signal-exit" +import { getStore } from "./redux" +import reporterActions from "./redux/actions" +import { ActivityStatuses } from "./constants" +import { reporter } from "./reporter" + +const interruptActivities = (): void => { + const { activities } = getStore().getState().logs + Object.keys(activities).forEach(activityId => { + const activity = activities[activityId] + if ( + activity.status === ActivityStatuses.InProgress || + activity.status === ActivityStatuses.NotStarted + ) { + reporter.completeActivity(activityId, ActivityStatuses.Interrupted) + } + }) +} + +export const prematureEnd = (): void => { + // hack so at least one activity is surely failed, so + // we are guaranteed to generate FAILED status + // if none of activity did explicitly fail + reporterActions.createPendingActivity({ + id: `panic`, + status: ActivityStatuses.Failed, + }) + + interruptActivities() +} + +export const catchExitSignals = (): void => { + signalExit((code, signal) => { + if (code !== 0 && signal !== `SIGINT` && signal !== `SIGTERM`) + prematureEnd() + else interruptActivities() + }) +} diff --git a/packages/gatsby-cli/src/reporter/index.js 
b/packages/gatsby-cli/src/reporter/index.js deleted file mode 100644 index 009064fcea216..0000000000000 --- a/packages/gatsby-cli/src/reporter/index.js +++ /dev/null @@ -1,430 +0,0 @@ -// @flow - -const semver = require(`semver`) -const { isCI } = require(`gatsby-core-utils`) -const signalExit = require(`signal-exit`) -const reporterActions = require(`./redux/actions`) - -const { LogLevels, ActivityStatuses, ActivityTypes } = require(`./constants`) - -let inkExists = false -try { - inkExists = require.resolve(`ink`) - // eslint-disable-next-line no-empty -} catch (err) {} - -if (!process.env.GATSBY_LOGGER) { - if ( - inkExists && - semver.satisfies(process.version, `>=8`) && - !isCI() && - typeof jest === `undefined` - ) { - process.env.GATSBY_LOGGER = `ink` - } else { - process.env.GATSBY_LOGGER = `yurnalist` - } -} -// if child process - use ipc logger -if (process.send) { - // process.env.FORCE_COLOR = `0` - - require(`./loggers/ipc`) -} - -if (process.env.GATSBY_LOGGER.includes(`json`)) { - require(`./loggers/json`) -} else if (process.env.GATSBY_LOGGER.includes(`yurnalist`)) { - require(`./loggers/yurnalist`) -} else { - require(`./loggers/ink`) -} - -const util = require(`util`) -const { stripIndent } = require(`common-tags`) -const chalk = require(`chalk`) -const { trackError } = require(`gatsby-telemetry`) -const tracer = require(`opentracing`).globalTracer() - -const { getErrorFormatter } = require(`./errors`) -const { getStore } = require(`./redux`) -import constructError from "../structured-errors/construct-error" - -const errorFormatter = getErrorFormatter() - -import type { ActivityTracker, ActivityArgs, Reporter } from "./types" - -const addMessage = level => text => reporterActions.createLog({ level, text }) - -let isVerbose = false - -const interruptActivities = () => { - const { activities } = getStore().getState().logs - Object.keys(activities).forEach(activityId => { - const activity = activities[activityId] - if ( - activity.status === ActivityStatuses.InProgress || - activity.status === ActivityStatuses.NotStarted - ) { - reporter.completeActivity(activityId, ActivityStatuses.Interrupted) - } - }) -} - -const prematureEnd = () => { - // hack so at least one activity is surely failed, so - // we are guaranteed to generate FAILED status - // if none of activity did explicitly fail - reporterActions.createPendingActivity({ - id: `panic`, - status: ActivityStatuses.Failed, - }) - - interruptActivities() -} - -signalExit((code, signal) => { - if (code !== 0 && signal !== `SIGINT` && signal !== `SIGTERM`) prematureEnd() - else interruptActivities() -}) - -/** - * Reporter module. - * @module reporter - */ -const reporter: Reporter = { - /** - * Strip initial indentation template function. - */ - stripIndent, - format: chalk, - /** - * Toggle verbosity. - * @param {boolean} [_isVerbose=true] - */ - setVerbose: (_isVerbose = true) => { - isVerbose = _isVerbose - }, - /** - * Turn off colors in error output. - * @param {boolean} [isNoColor=false] - */ - setNoColor(isNoColor = false) { - if (isNoColor) { - errorFormatter.withoutColors() - } - - // disables colors in popular terminal output coloring packages - // - chalk: see https://www.npmjs.com/package/chalk#chalksupportscolor - // - ansi-colors: see https://github.com/doowb/ansi-colors/blob/8024126c7115a0efb25a9a0e87bc5e29fd66831f/index.js#L5-L7 - if (isNoColor) { - process.env.FORCE_COLOR = `0` - // chalk determines color level at import time. 
Before we reach this point, - // chalk was already imported, so we need to retroactively adjust level - chalk.level = 0 - } - }, - /** - * Log arguments and exit process with status 1. - * @param {*} args - */ - panic(...args) { - const error = reporter.error(...args) - trackError(`GENERAL_PANIC`, { error }) - prematureEnd() - process.exit(1) - }, - - panicOnBuild(...args) { - const error = reporter.error(...args) - trackError(`BUILD_PANIC`, { error }) - if (process.env.gatsby_executing_command === `build`) { - prematureEnd() - process.exit(1) - } - return error - }, - - error(errorMeta, error) { - let details = {} - // Many paths to retain backcompat :scream: - if (arguments.length === 2) { - if (Array.isArray(error)) { - return error.map(errorItem => this.error(errorMeta, errorItem)) - } - details.error = error - details.context = { - sourceMessage: errorMeta + ` ` + error.message, - } - } else if (arguments.length === 1 && errorMeta instanceof Error) { - details.error = errorMeta - details.context = { - sourceMessage: errorMeta.message, - } - } else if (arguments.length === 1 && Array.isArray(errorMeta)) { - // when we get an array of messages, call this function once for each error - return errorMeta.map(errorItem => this.error(errorItem)) - } else if (arguments.length === 1 && typeof errorMeta === `object`) { - details = Object.assign({}, errorMeta) - } else if (arguments.length === 1 && typeof errorMeta === `string`) { - details.context = { - sourceMessage: errorMeta, - } - } - - const structuredError = constructError({ details }) - if (structuredError) { - reporterActions.createLog(structuredError) - } - - // TODO: remove this once Error component can render this info - // log formatted stacktrace - if (structuredError.error) { - this.log(errorFormatter.render(structuredError.error)) - } - return structuredError - }, - - /** - * Set prefix on uptime. - * @param {string} prefix - A string to prefix uptime with. - */ - uptime(prefix) { - this.verbose(`${prefix}: ${(process.uptime() * 1000).toFixed(3)}ms`) - }, - - verbose: text => { - if (isVerbose) { - reporterActions.createLog({ - level: LogLevels.Debug, - text, - }) - } - }, - - success: addMessage(LogLevels.Success), - info: addMessage(LogLevels.Info), - warn: addMessage(LogLevels.Warning), - log: addMessage(LogLevels.Log), - - pendingActivity: reporterActions.createPendingActivity, - - completeActivity: (id: string, status: string = ActivityStatuses.Success) => { - reporterActions.endActivity({ id, status }) - }, - - /** - * Time an activity. - * @param {string} text - Name of activity. - * @param {ActivityArgs} activityArgs - optional object with tracer parentSpan - * @returns {ActivityTracker} The activity tracker. - */ - activityTimer( - text: string, - activityArgs: ActivityArgs = {} - ): ActivityTracker { - let { parentSpan, id } = activityArgs - const spanArgs = parentSpan ? 
{ childOf: parentSpan } : {} - if (!id) { - id = text - } - - const span = tracer.startSpan(text, spanArgs) - - return { - start: () => { - reporterActions.startActivity({ - id, - text, - type: ActivityTypes.Spinner, - }) - }, - setStatus: statusText => { - reporterActions.setActivityStatusText({ - id, - statusText, - }) - }, - panicOnBuild(...args) { - span.finish() - - reporterActions.setActivityErrored({ - id, - }) - - return reporter.panicOnBuild(...args) - }, - panic(...args) { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Failed, - }) - - return reporter.panic(...args) - }, - end() { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Success, - }) - }, - span, - } - }, - - /** - * Create an Activity that is not visible to the user - * - * During the lifecycle of the Gatsby process, sometimes we need to do some - * async work and wait for it to complete. A typical example of this is a job. - * This work should set the status of the process to `in progress` while running and - * `complete` (or `failure`) when complete. Activities do just this! However, they - * are visible to the user. So this function can be used to create a _hidden_ activity - * that while not displayed in the CLI, still triggers a change in process status. - * - * @param {string} text - Name of activity. - * @param {ActivityArgs} activityArgs - optional object with tracer parentSpan - * @returns {ActivityTracker} The activity tracker. - */ - phantomActivity( - text: string, - activityArgs: ActivityArgs = {} - ): ActivityTracker { - let { parentSpan, id } = activityArgs - const spanArgs = parentSpan ? { childOf: parentSpan } : {} - if (!id) { - id = text - } - - const span = tracer.startSpan(text, spanArgs) - - return { - start: () => { - reporterActions.startActivity({ - id, - text, - type: ActivityTypes.Hidden, - }) - }, - end() { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Success, - }) - }, - span, - } - }, - - /** - * Create a progress bar for an activity - * @param {string} text - Name of activity. - * @param {number} total - Total items to be processed. - * @param {number} start - Start count to show. - * @param {ActivityArgs} activityArgs - optional object with tracer parentSpan - * @returns {ActivityTracker} The activity tracker. - */ - createProgress( - text: string, - total = 0, - start = 0, - activityArgs: ActivityArgs = {} - ): ActivityTracker { - let { parentSpan, id } = activityArgs - const spanArgs = parentSpan ? 
{ childOf: parentSpan } : {} - if (!id) { - id = text - } - const span = tracer.startSpan(text, spanArgs) - - let lastUpdateTime = 0 - let unflushedProgress = 0 - let unflushedTotal = 0 - const progressUpdateDelay = Math.round(1000 / 10) // 10 fps *shrug* - - const updateProgress = forced => { - const t = Date.now() - if (!forced && t - lastUpdateTime <= progressUpdateDelay) return - - if (unflushedTotal > 0) { - reporterActions.setActivityTotal({ id, total: unflushedTotal }) - unflushedTotal = 0 - } - if (unflushedProgress > 0) { - reporterActions.activityTick({ id, increment: unflushedProgress }) - unflushedProgress = 0 - } - lastUpdateTime = t - } - - return { - start: () => { - reporterActions.startActivity({ - id, - text, - type: ActivityTypes.Progress, - current: start, - total, - }) - }, - setStatus: statusText => { - reporterActions.setActivityStatusText({ - id, - statusText, - }) - }, - tick: (increment = 1) => { - unflushedProgress += increment // Have to manually track this :/ - updateProgress() - }, - panicOnBuild(...args) { - span.finish() - - reporterActions.setActivityErrored({ - id, - }) - - return reporter.panicOnBuild(...args) - }, - panic(...args) { - span.finish() - - reporterActions.endActivity({ - id, - status: ActivityStatuses.Failed, - }) - - return reporter.panic(...args) - }, - done: () => { - updateProgress(true) - span.finish() - reporterActions.endActivity({ - id, - status: ActivityStatuses.Success, - }) - }, - set total(value) { - unflushedTotal = value - updateProgress() - }, - span, - } - }, - // This method was called in older versions of gatsby, so we need to keep it to avoid - // "reporter._setStage is not a function" error when gatsby@<2.16 is used with gatsby-cli@>=2.8 - _setStage() {}, -} - -console.log = (...args) => reporter.log(util.format(...args)) -console.warn = (...args) => reporter.warn(util.format(...args)) -console.info = (...args) => reporter.info(util.format(...args)) -console.error = (...args) => reporter.error(util.format(...args)) - -module.exports = reporter diff --git a/packages/gatsby-cli/src/reporter/index.ts b/packages/gatsby-cli/src/reporter/index.ts new file mode 100644 index 0000000000000..1417ea5e2d44d --- /dev/null +++ b/packages/gatsby-cli/src/reporter/index.ts @@ -0,0 +1,11 @@ +import { startLogger } from "./start-logger" +import { patchConsole } from "./patch-console" +import { catchExitSignals } from "./catch-exit-signals" +import { reporter } from "./reporter" + +catchExitSignals() +startLogger() +patchConsole(reporter) + +export default reporter +module.exports = reporter diff --git a/packages/gatsby-cli/src/reporter/patch-console.ts b/packages/gatsby-cli/src/reporter/patch-console.ts new file mode 100644 index 0000000000000..f10d27e0e96c6 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/patch-console.ts @@ -0,0 +1,24 @@ +/* + * This module is used to patch console through our reporter so we can track + * these logs + */ +import util from "util" +import { reporter as gatsbyReporter } from "./reporter" + +export const patchConsole = (reporter: typeof gatsbyReporter): void => { + console.log = (...args: any[]): void => { + const [format, ...rest] = args + reporter.log(util.format(format === undefined ? `` : format, ...rest)) + } + console.warn = (...args: any[]): void => { + const [format, ...rest] = args + reporter.warn(util.format(format === undefined ? `` : format, ...rest)) + } + console.info = (...args: any[]): void => { + const [format, ...rest] = args + reporter.info(util.format(format === undefined ? 
`` : format, ...rest)) + } + console.error = (format: any, ...args: any[]): void => { + reporter.error(util.format(format, ...args)) + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter-phantom.ts b/packages/gatsby-cli/src/reporter/reporter-phantom.ts new file mode 100644 index 0000000000000..b052561c91eb9 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter-phantom.ts @@ -0,0 +1,42 @@ +import reporterActions from "./redux/actions" +import { ActivityStatuses, ActivityTypes } from "./constants" +import { Span } from "opentracing" + +interface ICreatePhantomReporterArguments { + text: string + id: string + span: Span +} + +export interface IPhantomReporter { + start(): void + end(): void + span: Span +} + +export const createPhantomReporter = ({ + text, + id, + span, +}: ICreatePhantomReporterArguments): IPhantomReporter => { + return { + start(): void { + reporterActions.startActivity({ + id, + text, + type: ActivityTypes.Hidden, + }) + }, + + end(): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Success, + }) + }, + + span, + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter-progress.ts b/packages/gatsby-cli/src/reporter/reporter-progress.ts new file mode 100644 index 0000000000000..eb0477bb8d177 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter-progress.ts @@ -0,0 +1,122 @@ +import reporterActions from "./redux/actions" +import { ActivityStatuses, ActivityTypes } from "./constants" +import { Span } from "opentracing" +import { reporter as gatsbyReporter } from "./reporter" +import { IStructuredError } from "../structured-errors/types" +import { ErrorMeta } from "./types" + +interface ICreateProgressReporterArguments { + id: string + text: string + start: number + total: number + span: Span + reporter: typeof gatsbyReporter +} + +export interface IProgressReporter { + start(): void + setStatus(statusText: string): void + tick(increment?: number): void + panicOnBuild( + arg: any, + ...otherArgs: any[] + ): IStructuredError | IStructuredError[] + panic(arg: any, ...otherArgs: any[]): void + done(): void + total: number + span: Span +} + +export const createProgressReporter = ({ + id, + text, + start, + total, + span, + reporter, +}: ICreateProgressReporterArguments): IProgressReporter => { + let lastUpdateTime = 0 + let unflushedProgress = 0 + let unflushedTotal = 0 + const progressUpdateDelay = Math.round(1000 / 10) // 10 fps *shrug* + + const updateProgress = (forced: boolean = false): void => { + const t = Date.now() + if (!forced && t - lastUpdateTime <= progressUpdateDelay) return + + if (unflushedTotal > 0) { + reporterActions.setActivityTotal({ id, total: unflushedTotal }) + unflushedTotal = 0 + } + if (unflushedProgress > 0) { + reporterActions.activityTick({ id, increment: unflushedProgress }) + unflushedProgress = 0 + } + lastUpdateTime = t + } + + return { + start(): void { + reporterActions.startActivity({ + id, + text, + type: ActivityTypes.Progress, + current: start, + total, + }) + }, + + setStatus(statusText: string): void { + reporterActions.setActivityStatusText({ + id, + statusText, + }) + }, + + tick(increment: number = 1): void { + unflushedProgress += increment // Have to manually track this :/ + updateProgress() + }, + + panicOnBuild( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] { + span.finish() + + reporterActions.setActivityErrored({ + id, + }) + + return reporter.panicOnBuild(errorMeta, error) + }, + + panic(errorMeta: ErrorMeta, 
error?: Error | Error[]): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Failed, + }) + + return reporter.panic(errorMeta, error) + }, + + done(): void { + updateProgress(true) + span.finish() + reporterActions.endActivity({ + id, + status: ActivityStatuses.Success, + }) + }, + + set total(value: number) { + unflushedTotal = value + updateProgress() + }, + + span, + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter-timer.ts b/packages/gatsby-cli/src/reporter/reporter-timer.ts new file mode 100644 index 0000000000000..3d19179422fdf --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter-timer.ts @@ -0,0 +1,88 @@ +/* + * This module is used when calling reporter. + * these logs + */ +import reporterActions from "./redux/actions" +import { ActivityStatuses, ActivityTypes } from "./constants" +import { Span } from "opentracing" +import { reporter as gatsbyReporter } from "./reporter" +import { IStructuredError } from "../structured-errors/types" +import { ErrorMeta } from "./types" + +interface ICreateTimerReporterArguments { + text: string + id: string + span: Span + reporter: typeof gatsbyReporter +} + +export interface ITimerReporter { + start(): void + setStatus(statusText: string): void + panicOnBuild( + arg: any, + ...otherArgs: any[] + ): IStructuredError | IStructuredError[] + panic(arg: any, ...otherArgs: any[]): void + end(): void + span: Span +} + +export const createTimerReporter = ({ + text, + id, + span, + reporter, +}: ICreateTimerReporterArguments): ITimerReporter => { + return { + start(): void { + reporterActions.startActivity({ + id, + text, + type: ActivityTypes.Spinner, + }) + }, + + setStatus(statusText: string): void { + reporterActions.setActivityStatusText({ + id, + statusText, + }) + }, + + panicOnBuild( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] { + span.finish() + + reporterActions.setActivityErrored({ + id, + }) + + return reporter.panicOnBuild(errorMeta, error) + }, + + panic(errorMeta: ErrorMeta, error?: Error | Error[]): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Failed, + }) + + return reporter.panic(errorMeta, error) + }, + + end(): void { + span.finish() + + reporterActions.endActivity({ + id, + status: ActivityStatuses.Success, + }) + }, + + span, + } +} diff --git a/packages/gatsby-cli/src/reporter/reporter.ts b/packages/gatsby-cli/src/reporter/reporter.ts new file mode 100644 index 0000000000000..87be5a5b65f35 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/reporter.ts @@ -0,0 +1,259 @@ +import { stripIndent } from "common-tags" +import chalk from "chalk" +import { trackError } from "gatsby-telemetry" +import { globalTracer, Span } from "opentracing" + +import reporterActions from "./redux/actions" +import { LogLevels, ActivityStatuses } from "./constants" +import { getErrorFormatter } from "./errors" +import constructError from "../structured-errors/construct-error" +import { prematureEnd } from "./catch-exit-signals" +import { IStructuredError } from "../structured-errors/types" +import { createTimerReporter, ITimerReporter } from "./reporter-timer" +import { createPhantomReporter, IPhantomReporter } from "./reporter-phantom" +import { createProgressReporter, IProgressReporter } from "./reporter-progress" +import { ErrorMeta, CreateLogAction } from "./types" + +const errorFormatter = getErrorFormatter() +const tracer = globalTracer() + +interface IActivityArgs { + id?: string + parentSpan?: Span 
+} + +let isVerbose = false + +/** + * Reporter module. + * @module reporter + */ +class Reporter { + /** + * Strip initial indentation template function. + */ + stripIndent = stripIndent + format = chalk + + /** + * Toggle verbosity. + */ + setVerbose = (_isVerbose: boolean = true): void => { + isVerbose = _isVerbose + } + + /** + * Turn off colors in error output. + */ + setNoColor = (isNoColor: boolean = false): void => { + if (isNoColor) { + errorFormatter.withoutColors() + } + + // disables colors in popular terminal output coloring packages + // - chalk: see https://www.npmjs.com/package/chalk#chalksupportscolor + // - ansi-colors: see https://github.com/doowb/ansi-colors/blob/8024126c7115a0efb25a9a0e87bc5e29fd66831f/index.js#L5-L7 + if (isNoColor) { + process.env.FORCE_COLOR = `0` + // chalk determines color level at import time. Before we reach this point, + // chalk was already imported, so we need to retroactively adjust level + chalk.level = 0 + } + } + + /** + * Log arguments and exit process with status 1. + */ + panic = (errorMeta: ErrorMeta, error?: Error | Error[]): void => { + const reporterError = this.error(errorMeta, error) + trackError(`GENERAL_PANIC`, { error: reporterError }) + prematureEnd() + process.exit(1) + } + + panicOnBuild = ( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] => { + const reporterError = this.error(errorMeta, error) + trackError(`BUILD_PANIC`, { error: reporterError }) + if (process.env.gatsby_executing_command === `build`) { + prematureEnd() + process.exit(1) + } + return reporterError + } + + error = ( + errorMeta: ErrorMeta, + error?: Error | Error[] + ): IStructuredError | IStructuredError[] => { + let details: { + error?: Error + context: {} + } = { + context: {}, + } + + // Many paths to retain backcompat :scream: + // 1. + // reporter.error(any, Error); + // reporter.error(any, [Error]); + if (error) { + if (Array.isArray(error)) { + return error.map(errorItem => + this.error(errorMeta, errorItem) + ) as IStructuredError[] + } + details.error = error + details.context = { + sourceMessage: errorMeta + ` ` + error.message, + } + // 2. + // reporter.error(Error); + } else if (errorMeta instanceof Error) { + details.error = errorMeta + details.context = { + sourceMessage: errorMeta.message, + } + // 3. + // reporter.error([Error]); + } else if (Array.isArray(errorMeta)) { + // when we get an array of messages, call this function once for each error + return errorMeta.map(errorItem => + this.error(errorItem) + ) as IStructuredError[] + // 4. + // reporter.error(errorMeta); + } else if (typeof errorMeta === `object`) { + details = { ...errorMeta } + // 5. + // reporter.error('foo'); + } else if (typeof errorMeta === `string`) { + details.context = { + sourceMessage: errorMeta, + } + } + + const structuredError = constructError({ details }) + if (structuredError) { + reporterActions.createLog(structuredError) + } + + // TODO: remove this once Error component can render this info + // log formatted stacktrace + if (structuredError.error) { + this.log(errorFormatter.render(structuredError.error)) + } + return structuredError + } + + /** + * Set prefix on uptime. 
+ */ + uptime = (prefix: string): void => { + this.verbose(`${prefix}: ${(process.uptime() * 1000).toFixed(3)}ms`) + } + + verbose = (text: string): void => { + if (isVerbose) { + reporterActions.createLog({ + level: LogLevels.Debug, + text, + }) + } + } + + success = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Success, text }) + info = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Info, text }) + warn = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Warning, text }) + log = (text?: string): CreateLogAction => + reporterActions.createLog({ level: LogLevels.Log, text }) + + pendingActivity = reporterActions.createPendingActivity + + completeActivity = ( + id: string, + status: ActivityStatuses = ActivityStatuses.Success + ): void => { + reporterActions.endActivity({ id, status }) + } + + /** + * Time an activity. + */ + activityTimer = ( + text: string, + activityArgs: IActivityArgs = {} + ): ITimerReporter => { + let { parentSpan, id } = activityArgs + const spanArgs = parentSpan ? { childOf: parentSpan } : {} + if (!id) { + id = text + } + + const span = tracer.startSpan(text, spanArgs) + + return createTimerReporter({ text, id, span, reporter: this }) + } + + /** + * Create an Activity that is not visible to the user + * + * During the lifecycle of the Gatsby process, sometimes we need to do some + * async work and wait for it to complete. A typical example of this is a job. + * This work should set the status of the process to `in progress` while running and + * `complete` (or `failure`) when complete. Activities do just this! However, they + * are visible to the user. So this function can be used to create a _hidden_ activity + * that while not displayed in the CLI, still triggers a change in process status. + */ + phantomActivity = ( + text: string, + activityArgs: IActivityArgs = {} + ): IPhantomReporter => { + let { parentSpan, id } = activityArgs + const spanArgs = parentSpan ? { childOf: parentSpan } : {} + if (!id) { + id = text + } + + const span = tracer.startSpan(text, spanArgs) + + return createPhantomReporter({ id, text, span }) + } + + /** + * Create a progress bar for an activity + */ + createProgress = ( + text: string, + total = 0, + start = 0, + activityArgs: IActivityArgs = {} + ): IProgressReporter => { + let { parentSpan, id } = activityArgs + const spanArgs = parentSpan ? { childOf: parentSpan } : {} + if (!id) { + id = text + } + const span = tracer.startSpan(text, spanArgs) + + return createProgressReporter({ + id, + text, + total, + start, + span, + reporter: this, + }) + } + + // This method was called in older versions of gatsby, so we need to keep it to avoid + // "reporter._setStage is not a function" error when gatsby@<2.16 is used with gatsby-cli@>=2.8 + _setStage = (): void => {} +} + +export const reporter = new Reporter() diff --git a/packages/gatsby-cli/src/reporter/start-logger.ts b/packages/gatsby-cli/src/reporter/start-logger.ts new file mode 100644 index 0000000000000..bb6166fb0cee9 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/start-logger.ts @@ -0,0 +1,40 @@ +/* + * This module is a side-effect filled module to load in the proper logger. 
+ */ +import semver from "semver" +import { isCI } from "gatsby-core-utils" + +export const startLogger = (): void => { + let inkExists = false + try { + inkExists = !!require.resolve(`ink`) + // eslint-disable-next-line no-empty + } catch (err) {} + + if (!process.env.GATSBY_LOGGER) { + if ( + inkExists && + semver.satisfies(process.version, `>=8`) && + !isCI() && + typeof jest === `undefined` + ) { + process.env.GATSBY_LOGGER = `ink` + } else { + process.env.GATSBY_LOGGER = `yurnalist` + } + } + // if child process - use ipc logger + if (process.send) { + // process.env.FORCE_COLOR = `0` + + require(`./loggers/ipc`) + } + + if (process.env.GATSBY_LOGGER.includes(`json`)) { + require(`./loggers/json`) + } else if (process.env.GATSBY_LOGGER.includes(`yurnalist`)) { + require(`./loggers/yurnalist`) + } else { + require(`./loggers/ink`) + } +} diff --git a/packages/gatsby-cli/src/reporter/types.js b/packages/gatsby-cli/src/reporter/types.js deleted file mode 100644 index e0131dbc55965..0000000000000 --- a/packages/gatsby-cli/src/reporter/types.js +++ /dev/null @@ -1,31 +0,0 @@ -// @flow - -export type ActivityTracker = { - start(): Function, - end(): Function, - setStatus(status: string): Function, - span: Object, -} - -export type ActivityArgs = { - parentSpan?: Object, -} - -type LogMessageType = (format: string, ...args: Array) => void - -export interface Reporter { - stripIndent: Function; - format: Object; - setVerbose(isVerbose: boolean): void; - setNoColor(isNoColor: boolean): void; - panic(...args: Array): void; - panicOnBuild(...args: Array): void; - error(errorMeta: string | Object, error?: Object): void; - uptime(prefix: string): void; - success: LogMessageType; - verbose: LogMessageType; - info: LogMessageType; - warn: LogMessageType; - log: LogMessageType; - activityTimer(name: string, activityArgs: ActivityArgs): ActivityTracker; -} diff --git a/packages/gatsby-cli/src/reporter/types.ts b/packages/gatsby-cli/src/reporter/types.ts new file mode 100644 index 0000000000000..7a97a7118b832 --- /dev/null +++ b/packages/gatsby-cli/src/reporter/types.ts @@ -0,0 +1,12 @@ +// TODO: This needs to be implemented when redux/acitons is converted to TS +export type CreateLogAction = any + +export type ErrorMeta = + | { + id: string + error?: Error + context: Record + [id: string]: any + } + | string + | Error diff --git a/packages/gatsby-dev-cli/CHANGELOG.md b/packages/gatsby-dev-cli/CHANGELOG.md index 5ce0de37e4761..e7b8c47aefa7f 100644 --- a/packages/gatsby-dev-cli/CHANGELOG.md +++ b/packages/gatsby-dev-cli/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [2.6.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-dev-cli@2.6.1...gatsby-dev-cli@2.6.2) (2020-04-08) + +### Bug Fixes + +- **docs:** remove double words ([#22494](https://github.com/gatsbyjs/gatsby/issues/22494)) ([75f6ee2](https://github.com/gatsbyjs/gatsby/commit/75f6ee2)) + ## [2.6.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-dev-cli@2.6.0...gatsby-dev-cli@2.6.1) (2020-03-23) **Note:** Version bump only for package gatsby-dev-cli diff --git a/packages/gatsby-dev-cli/README.md b/packages/gatsby-dev-cli/README.md index 551af3066b37f..70b393061f259 100644 --- a/packages/gatsby-dev-cli/README.md +++ b/packages/gatsby-dev-cli/README.md @@ -31,6 +31,20 @@ this program running. Typically you'll also want to run `npm run watch` in the Gatsby repo to set up watchers to build Gatsby source code. 
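Note on the gatsby-cli reporter refactor earlier in this diff: the old monolithic `index.js` is split into `start-logger`, `patch-console`, `catch-exit-signals`, and a `Reporter` class, but the surface consumed by gatsby core and plugins is unchanged. A minimal usage sketch, assuming the usual compiled import path; the activity names are illustrative:

```js
// Illustrative consumer of the refactored reporter API (import path and names assumed).
const reporter = require(`gatsby-cli/lib/reporter`)

const activity = reporter.activityTimer(`build schema`)
activity.start()
activity.setStatus(`inferring types`)
// ...do work...
activity.end()

const progress = reporter.createProgress(`source nodes`, 100)
progress.start()
progress.tick(10) // increments are batched and flushed at roughly 10fps
progress.done()
```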
+## Revert to current packages + +If you've recently run `gatsby-dev` your `node_modules` will be out of sync with current published packages. In order to undo this, you can remove the `node_modules` directory or run: + +```shell +git checkout package.json; yarn --force +``` + +or + +```shell +git checkout package.json; npm install --force +``` + **[Demo Video](https://www.youtube.com/watch?v=D0SwX1MSuas)** More detailed instruction for setting up your Gatsby development environment can diff --git a/packages/gatsby-dev-cli/package.json b/packages/gatsby-dev-cli/package.json index 5e746c7468e1e..a5d651c734cb5 100644 --- a/packages/gatsby-dev-cli/package.json +++ b/packages/gatsby-dev-cli/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-dev-cli", "description": "CLI helpers for contributors working on Gatsby", - "version": "2.6.1", + "version": "2.6.2", "author": "Kyle Mathews ", "bin": { "gatsby-dev": "./dist/index.js" diff --git a/packages/gatsby-image/CHANGELOG.md b/packages/gatsby-image/CHANGELOG.md index a7c4d57034abc..af91e3c96f100 100644 --- a/packages/gatsby-image/CHANGELOG.md +++ b/packages/gatsby-image/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [2.3.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-image@2.3.1...gatsby-image@2.3.2) (2020-04-07) + +### Bug Fixes + +- **gatsby-image:** apply IE polyfill styles to placeholder images too ([#22863](https://github.com/gatsbyjs/gatsby/issues/22863)) ([80c453e](https://github.com/gatsbyjs/gatsby/commit/80c453e)) + ## [2.3.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-image@2.3.0...gatsby-image@2.3.1) (2020-03-23) ### Bug Fixes diff --git a/packages/gatsby-image/package.json b/packages/gatsby-image/package.json index 190492b76c7cf..a739a4d9b629d 100644 --- a/packages/gatsby-image/package.json +++ b/packages/gatsby-image/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-image", "description": "Lazy-loading React image component with optional support for the blur-up effect.", - "version": "2.3.1", + "version": "2.3.2", "author": "Kyle Mathews ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-image/src/index.js b/packages/gatsby-image/src/index.js index 5cbb09e7a72c8..8621fbfe83fb6 100644 --- a/packages/gatsby-image/src/index.js +++ b/packages/gatsby-image/src/index.js @@ -262,14 +262,12 @@ const noscriptImg = props => { // Earlier versions of gatsby-image during the 2.x cycle did not wrap // the `Img` component in a `picture` element. This maintains compatibility // until a breaking change can be introduced in the next major release -const Placeholder = ({ - src, - imageVariants, - generateSources, - spreadProps, - ariaHidden, -}) => { - const baseImage = +const Placeholder = React.forwardRef((props, ref) => { + const { src, imageVariants, generateSources, spreadProps, ariaHidden } = props + + const baseImage = ( + + ) return imageVariants.length > 1 ? 
( @@ -279,7 +277,7 @@ const Placeholder = ({ ) : ( baseImage ) -} +}) const Img = React.forwardRef((props, ref) => { const { @@ -357,6 +355,7 @@ class Image extends React.Component { } this.imageRef = React.createRef() + this.placeholderRef = props.placeholderRef || React.createRef() this.handleImageLoaded = this.handleImageLoaded.bind(this) this.handleRef = this.handleRef.bind(this) } @@ -517,6 +516,7 @@ class Image extends React.Component { {image.base64 && ( + import(`object-fit-images`).then(({ default: ObjectFitImages }) => { ObjectFitImages(this.imageRef.current.imageRef.current) - ) + ObjectFitImages(this.placeholderRef.current) + }) } } render() { const { objectFit, objectPosition, ...props } = this.props + const polyfillStyle = { + objectFit: objectFit, + objectPosition: objectPosition, + fontFamily: `"object-fit: ${objectFit}; object-position: ${objectPosition}"`, + } + return ( ) diff --git a/packages/gatsby-plugin-mdx/CHANGELOG.md b/packages/gatsby-plugin-mdx/CHANGELOG.md index c65fee889e0d3..1b227a566c288 100644 --- a/packages/gatsby-plugin-mdx/CHANGELOG.md +++ b/packages/gatsby-plugin-mdx/CHANGELOG.md @@ -3,6 +3,22 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.1.7](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-mdx@1.1.6...gatsby-plugin-mdx@1.1.7) (2020-04-10) + +### Bug Fixes + +- **gatsby-plugin-mdx:** Truncate non-latin language excerpts correctly ([#22638](https://github.com/gatsbyjs/gatsby/issues/22638)) ([ec80671](https://github.com/gatsbyjs/gatsby/commit/ec80671)) + +## [1.1.6](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-mdx@1.1.5...gatsby-plugin-mdx@1.1.6) (2020-04-07) + +### Bug Fixes + +- **gatsby-plugin-mdx:** pass on proper `modules` option value to babel ([#22903](https://github.com/gatsbyjs/gatsby/issues/22903)) ([cd120ae](https://github.com/gatsbyjs/gatsby/commit/cd120ae)) + +## [1.1.5](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-mdx@1.1.4...gatsby-plugin-mdx@1.1.5) (2020-04-07) + +**Note:** Version bump only for package gatsby-plugin-mdx + ## [1.1.4](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-mdx@1.1.3...gatsby-plugin-mdx@1.1.4) (2020-03-30) **Note:** Version bump only for package gatsby-plugin-mdx diff --git a/packages/gatsby-plugin-mdx/README.md b/packages/gatsby-plugin-mdx/README.md index 9006023a3980d..ebadfca7cec7e 100644 --- a/packages/gatsby-plugin-mdx/README.md +++ b/packages/gatsby-plugin-mdx/README.md @@ -52,7 +52,7 @@ MDX seeks to make writing with Markdown and JSX simpler while being more express Install with npm: ```shell -npm install --save gatsby-plugin-mdx @mdx-js/mdx @mdx-js/react +npm install gatsby-plugin-mdx @mdx-js/mdx @mdx-js/react ``` Install with yarn: @@ -335,6 +335,7 @@ images can be optimized by Gatsby and you should continue using it. // gatsby-config.js module.exports = { plugins: [ + `gatsby-remark-images`, { resolve: `gatsby-plugin-mdx`, options: { @@ -358,6 +359,8 @@ Using a string reference is also supported for `gatsbyRemarkPlugins`. gatsbyRemarkPlugins: [`gatsby-remark-images`] ``` +> Note that in the case of `gatsby-remark-images` the plugin needs to be included as both a sub-plugin of gatsby-plugin-mdx and a string entry in the plugins array. 
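To make the note above concrete, here is a sketch of a `gatsby-config.js` with the dual registration; the `maxWidth` value is only an example:

```js
// gatsby-remark-images registered twice: once at the top level and once as a
// sub-plugin of gatsby-plugin-mdx (maxWidth is illustrative).
module.exports = {
  plugins: [
    `gatsby-remark-images`,
    {
      resolve: `gatsby-plugin-mdx`,
      options: {
        gatsbyRemarkPlugins: [
          {
            resolve: `gatsby-remark-images`,
            options: { maxWidth: 1200 },
          },
        ],
      },
    },
  ],
}
```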
+ #### Remark plugins This is a configuration option that is [mirrored from the core MDX @@ -559,6 +562,22 @@ export const pageQuery = graphql` ` ``` +## Troubleshooting + +### Excerpts for non-latin languages + +By default, `excerpt` uses `underscore.string/prune` which doesn't handle non-latin characters ([https://github.com/epeli/underscore.string/issues/418](https://github.com/epeli/underscore.string/issues/418)). + +If that is the case, you can set `truncate` option on `excerpt` field, like: + +```graphql +{ + markdownRemark { + excerpt(truncate: true) + } +} +``` + ## License MIT diff --git a/packages/gatsby-plugin-mdx/gatsby/source-nodes.js b/packages/gatsby-plugin-mdx/gatsby/source-nodes.js index e22e5dc0a0a6a..04063f32f2a34 100644 --- a/packages/gatsby-plugin-mdx/gatsby/source-nodes.js +++ b/packages/gatsby-plugin-mdx/gatsby/source-nodes.js @@ -1,4 +1,5 @@ const _ = require(`lodash`) +const { GraphQLBoolean } = require(`gatsby/graphql`) const remark = require(`remark`) const english = require(`retext-english`) const remark2retext = require(`remark-retext`) @@ -151,8 +152,12 @@ module.exports = ( type: `Int`, defaultValue: 140, }, + truncate: { + type: GraphQLBoolean, + defaultValue: false, + }, }, - async resolve(mdxNode, { pruneLength }) { + async resolve(mdxNode, { pruneLength, truncate }) { if (mdxNode.excerpt) { return Promise.resolve(mdxNode.excerpt) } @@ -166,7 +171,14 @@ module.exports = ( return }) - return prune(excerptNodes.join(` `), pruneLength, `…`) + if (!truncate) { + return prune(excerptNodes.join(` `), pruneLength, `…`) + } + + return _.truncate(excerptNodes.join(` `), { + length: pruneLength, + omission: `…`, + }) }, }, headings: { diff --git a/packages/gatsby-plugin-mdx/package.json b/packages/gatsby-plugin-mdx/package.json index 1507840fabddb..99dfaefc4f21b 100644 --- a/packages/gatsby-plugin-mdx/package.json +++ b/packages/gatsby-plugin-mdx/package.json @@ -1,6 +1,6 @@ { "name": "gatsby-plugin-mdx", - "version": "1.1.4", + "version": "1.1.7", "description": "MDX integration for Gatsby", "main": "index.js", "license": "MIT", diff --git a/packages/gatsby-plugin-mdx/utils/gen-mdx.js b/packages/gatsby-plugin-mdx/utils/gen-mdx.js index 8e51b88dee1dd..fc6fbd4b3ef3d 100644 --- a/packages/gatsby-plugin-mdx/utils/gen-mdx.js +++ b/packages/gatsby-plugin-mdx/utils/gen-mdx.js @@ -150,7 +150,7 @@ ${code}` { useBuiltIns: `entry`, corejs: 2, - modules: `false`, + modules: false, }, ], ], diff --git a/packages/gatsby-plugin-react-helmet/CHANGELOG.md b/packages/gatsby-plugin-react-helmet/CHANGELOG.md index 31c5c8978630e..f6dd327340ae9 100644 --- a/packages/gatsby-plugin-react-helmet/CHANGELOG.md +++ b/packages/gatsby-plugin-react-helmet/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
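Note on the `excerpt` resolver change in gatsby-plugin-mdx above: `underscore.string/prune` only cuts at latin word boundaries, so the new `truncate` argument switches to `lodash.truncate`, which cuts purely by character count. A rough sketch of the difference, with approximate behavior noted in the comments:

```js
// Approximate illustration of why the truncate option was added.
const prune = require(`underscore.string/prune`)
const _ = require(`lodash`)

const cjk = `これは日本語で書かれたとても長い抜粋のテキストです`

// prune looks for latin word boundaries, so it may not shorten this text at all
prune(cjk, 10, `…`)

// lodash.truncate cuts by character count: roughly the first 9 characters plus `…`
_.truncate(cjk, { length: 10, omission: `…` })
```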
+## [3.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-react-helmet@3.2.1...gatsby-plugin-react-helmet@3.2.2) (2020-04-10) + +### Bug Fixes + +- **gatsby-plugin-react-helmet:** allow the use of `react-helmet@6` ([#22993](https://github.com/gatsbyjs/gatsby/issues/22993)) ([ed2762f](https://github.com/gatsbyjs/gatsby/commit/ed2762f)) + ## [3.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-react-helmet@3.2.0...gatsby-plugin-react-helmet@3.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-plugin-react-helmet diff --git a/packages/gatsby-plugin-react-helmet/package.json b/packages/gatsby-plugin-react-helmet/package.json index e38c12715069a..c7f42cf1db541 100644 --- a/packages/gatsby-plugin-react-helmet/package.json +++ b/packages/gatsby-plugin-react-helmet/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-plugin-react-helmet", "description": "Manage document head data with react-helmet. Provides drop-in server rendering support for Gatsby.", - "version": "3.2.1", + "version": "3.2.2", "author": "Kyle Mathews ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" @@ -36,7 +36,7 @@ "main": "index.js", "peerDependencies": { "gatsby": "^2.0.0", - "react-helmet": "^5.1.3" + "react-helmet": "^5.1.3 || ^6.0.0" }, "repository": { "type": "git", diff --git a/packages/gatsby-plugin-sitemap/src/__tests__/internals.js b/packages/gatsby-plugin-sitemap/src/__tests__/internals.js index 8b047261d2c74..7a8b5652f25be 100644 --- a/packages/gatsby-plugin-sitemap/src/__tests__/internals.js +++ b/packages/gatsby-plugin-sitemap/src/__tests__/internals.js @@ -137,6 +137,16 @@ describe(`results using non default alternatives`, () => { }, ], }, + otherData: { + nodes: [ + { + name: `test`, + }, + { + name: `test 2`, + }, + ], + }, }, } } @@ -161,5 +171,6 @@ describe(`results using non default alternatives`, () => { const queryRecords = filterQuery(results, [], ``, customSiteResolver) expect(queryRecords.site.siteMetadata.siteUrl).toEqual(customUrl) + expect(queryRecords).toHaveProperty(`otherData`) }) }) diff --git a/packages/gatsby-plugin-sitemap/src/internals.js b/packages/gatsby-plugin-sitemap/src/internals.js index 8e61b4afec0cc..7b5a8356a8425 100644 --- a/packages/gatsby-plugin-sitemap/src/internals.js +++ b/packages/gatsby-plugin-sitemap/src/internals.js @@ -20,7 +20,9 @@ export function filterQuery( throw new Error(errors.join(`, `)) } - let { allPages, originalType } = getNodes(data.allSitePage) + const { allSitePage, ...otherData } = data + + let { allPages, originalType } = getNodes(allSitePage) // Removing excluded paths allPages = allPages.filter( @@ -53,6 +55,7 @@ export function filterQuery( siteUrl = withoutTrailingSlash(siteUrl) return { + ...otherData, allSitePage: { [originalType]: originalType === `nodes` diff --git a/packages/gatsby-plugin-twitter/CHANGELOG.md b/packages/gatsby-plugin-twitter/CHANGELOG.md index 77be9f42196cc..5b2d713b71a01 100644 --- a/packages/gatsby-plugin-twitter/CHANGELOG.md +++ b/packages/gatsby-plugin-twitter/CHANGELOG.md @@ -3,6 +3,10 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
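Note on the gatsby-plugin-sitemap `filterQuery` change above: previously only `allSitePage` survived filtering, so extra roots in a custom `query` never reached `serialize`. With `...otherData` spread back into the result, a config along these lines works; the `allMarkdownRemark` root and its fields are illustrative, not part of this diff:

```js
// Illustrative custom sitemap query; extra roots such as allMarkdownRemark now
// reach the serialize callback instead of being stripped by filterQuery.
module.exports = {
  plugins: [
    {
      resolve: `gatsby-plugin-sitemap`,
      options: {
        query: `
          {
            site { siteMetadata { siteUrl } }
            allSitePage { nodes { path } }
            allMarkdownRemark { nodes { frontmatter { lastmod } } }
          }
        `,
        serialize: ({ site, allSitePage, allMarkdownRemark }) =>
          allSitePage.nodes.map(node => ({
            url: site.siteMetadata.siteUrl + node.path,
            // allMarkdownRemark is available here, e.g. for lastmod lookups
          })),
      },
    },
  ],
}
```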
+## [2.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-twitter@2.2.1...gatsby-plugin-twitter@2.2.2) (2020-04-07) + +**Note:** Version bump only for package gatsby-plugin-twitter + ## [2.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-plugin-twitter@2.2.0...gatsby-plugin-twitter@2.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-plugin-twitter diff --git a/packages/gatsby-plugin-twitter/README.md b/packages/gatsby-plugin-twitter/README.md index f725fa9fda08a..5048cf676c868 100644 --- a/packages/gatsby-plugin-twitter/README.md +++ b/packages/gatsby-plugin-twitter/README.md @@ -3,16 +3,22 @@ Loads the Twitter JavaScript for embedding tweets, timelines, share and follow buttons. Lets you add tweets to markdown and in other places. -Note: when copying the embed code, just copy the blockquote section and not the -script. - ## Install -`npm install --save gatsby-plugin-twitter` +```shell +npm install gatsby-plugin-twitter +``` -## How to use +## Configure ```javascript // In your gatsby-config.js plugins: [`gatsby-plugin-twitter`] ``` + +## How to Use + +1. On Twitter, find the tweet you'd like to display. +2. In the top right-hand corner of the tweet, click the caret (∨) and select "Embed tweet". +3. Take the code you're given and copy it into your markdown page. + > Note: when copying the embed code, copy **only** the blockquote section and not the script. diff --git a/packages/gatsby-plugin-twitter/package.json b/packages/gatsby-plugin-twitter/package.json index ef0dcea8bec8a..ab8b56a7c76b4 100644 --- a/packages/gatsby-plugin-twitter/package.json +++ b/packages/gatsby-plugin-twitter/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-plugin-twitter", "description": "Loads the Twitter JavaScript for embedding tweets.", - "version": "2.2.1", + "version": "2.2.2", "author": "Kyle Mathews ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-remark-embed-snippet/CHANGELOG.md b/packages/gatsby-remark-embed-snippet/CHANGELOG.md index 042457e287261..a748cc1e70d1f 100644 --- a/packages/gatsby-remark-embed-snippet/CHANGELOG.md +++ b/packages/gatsby-remark-embed-snippet/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [4.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-remark-embed-snippet@4.2.1...gatsby-remark-embed-snippet@4.2.2) (2020-04-11) + +### Features + +- **remark-embed-snippet:** embed specific lines ([#21907](https://github.com/gatsbyjs/gatsby/issues/21907)) ([109b905](https://github.com/gatsbyjs/gatsby/commit/109b905)) + ## [4.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-remark-embed-snippet@4.2.0...gatsby-remark-embed-snippet@4.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-remark-embed-snippet diff --git a/packages/gatsby-remark-embed-snippet/README.md b/packages/gatsby-remark-embed-snippet/README.md index 2e34e16a62f11..f1568b03c06fc 100644 --- a/packages/gatsby-remark-embed-snippet/README.md +++ b/packages/gatsby-remark-embed-snippet/README.md @@ -185,9 +185,8 @@ The resulting HTML generated from the markdown file above would look something l ### Highlighting Lines -You can also specify specific lines for Prism to highlight using -`highlight-line` and `highlight-next-line` comments. You can also specify a -range of lines to highlight, relative to a `highlight-range` comment. 
+You can specify specific lines for Prism to highlight using +`highlight-line` and `highlight-next-line` comments. You can also specify a range of lines to highlight, relative to a `highlight-range` comment. **JavaScript example**: @@ -250,8 +249,49 @@ quz: "highlighted" It's also possible to specify a range of lines to be hidden. +You can either specify line ranges in the embed using the syntax: + +- #Lx - Embed one line from a file +- #Lx-y - Embed a range of lines from a file +- #Lx-y,a-b - Embed non-consecutive ranges of lines from a file + +**Markdown example**: + +```markdown +This is the JSX of my app: + +`embed:App.js#L6-8` +``` + +With this example snippet: + +```js +import React from "react" +import ReactDOM from "react-dom" + +function App() { + return ( +
+

Hello world

+
+ ) +} +``` + +Will produce something like this: + +```markdown +This is the JSX of my app: + +
+

Hello world

+
+``` + **JavaScript example**: +You can also add `// hide-range` comments to your files. + ```jsx // hide-range{1-2} import React from "react" diff --git a/packages/gatsby-remark-embed-snippet/package.json b/packages/gatsby-remark-embed-snippet/package.json index 15862533f3f20..750f64af93a3c 100644 --- a/packages/gatsby-remark-embed-snippet/package.json +++ b/packages/gatsby-remark-embed-snippet/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-remark-embed-snippet", "description": "Gatsby plugin to embed formatted code snippets within markdown", - "version": "4.2.1", + "version": "4.2.2", "author": "Brian Vaughn ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-remark-embed-snippet/src/__tests__/index.js b/packages/gatsby-remark-embed-snippet/src/__tests__/index.js index a1c1fca9e2831..65bac278bbd0b 100644 --- a/packages/gatsby-remark-embed-snippet/src/__tests__/index.js +++ b/packages/gatsby-remark-embed-snippet/src/__tests__/index.js @@ -36,6 +36,56 @@ describe(`gatsby-remark-embed-snippet`, () => { ) }) + it(`should display a code block of a single line`, () => { + const codeBlockValue = ` console.log('hello world')` + fs.readFileSync.mockReturnValue(`function test() { +${codeBlockValue} +}`) + + const markdownAST = remark.parse(`\`embed:hello-world.js#L2\``) + const transformed = plugin({ markdownAST }, { directory: `examples` }) + + const codeBlock = transformed.children[0].children[0] + + expect(codeBlock.value).toEqual(codeBlockValue) + }) + + it(`should display a code block of a range of lines`, () => { + const codeBlockValue = ` if (window.location.search.indexOf('query') > -1) { + console.log('The user is searching') +}` + fs.readFileSync.mockReturnValue(`function test() { +${codeBlockValue} +}`) + + const markdownAST = remark.parse(`\`embed:hello-world.js#L2-4\``) + const transformed = plugin({ markdownAST }, { directory: `examples` }) + + const codeBlock = transformed.children[0].children[0] + + expect(codeBlock.value).toEqual(codeBlockValue) + }) + + it(`should display a code block of a range of non-consecutive lines`, () => { + const notInSnippet = `lineShouldNotBeInSnippet();` + fs.readFileSync.mockReturnValue(`function test() { + if (window.location.search.indexOf('query') > -1) { + console.log('The user is searching') + } +} +${notInSnippet} +window.addEventListener('resize', () => { + test(); +})`) + + const markdownAST = remark.parse(`\`embed:hello-world.js#L2-4,7-9\``) + const transformed = plugin({ markdownAST }, { directory: `examples` }) + + const codeBlock = transformed.children[0].children[0] + + expect(codeBlock.value).not.toContain(notInSnippet) + }) + it(`should error if an invalid file path is specified`, () => { fs.existsSync.mockImplementation(path => path !== `examples/hello-world.js`) diff --git a/packages/gatsby-remark-embed-snippet/src/index.js b/packages/gatsby-remark-embed-snippet/src/index.js index dd9fec7dcdd92..c5fcb8d6ff9a4 100644 --- a/packages/gatsby-remark-embed-snippet/src/index.js +++ b/packages/gatsby-remark-embed-snippet/src/index.js @@ -4,6 +4,7 @@ const path = require(`path`) const fs = require(`fs`) const normalizePath = require(`normalize-path`) const visit = require(`unist-util-visit`) +const rangeParser = require(`parse-numeric-range`) // Language defaults to extension.toLowerCase(); // This map tracks languages that don't match their extension. 
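The hunk that follows uses `parse-numeric-range` to expand the `#L` suffix into a list of line numbers. A quick illustration of the range grammar described in the README section above:

```js
// How the embed range strings expand (parse-numeric-range, as used in the hunk below).
const rangeParser = require(`parse-numeric-range`)

rangeParser.parse(`2-4`) // [2, 3, 4]
rangeParser.parse(`2-4,7-9`) // [2, 3, 4, 7, 8, 9]
// A single number such as `2` is handled separately in the plugin via
// Number.parseInt before falling back to rangeParser.parse.
```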
@@ -46,13 +47,33 @@ module.exports = ({ markdownAST, markdownNode }, { directory } = {}) => { if (value.startsWith(`embed:`)) { const file = value.substr(6) - const snippetPath = normalizePath(path.join(directory, file)) + let snippetPath = normalizePath(path.join(directory, file)) + + // Embed specific lines numbers of a file + let lines = [] + const rangePrefixIndex = snippetPath.indexOf(`#L`) + if (rangePrefixIndex > -1) { + const range = snippetPath.slice(rangePrefixIndex + 2) + if (range.length === 1) { + lines = [Number.parseInt(range, 10)] + } else { + lines = rangeParser.parse(range) + } + // Remove everything after the range prefix from file path + snippetPath = snippetPath.slice(0, rangePrefixIndex) + } if (!fs.existsSync(snippetPath)) { throw Error(`Invalid snippet specified; no such file "${snippetPath}"`) } - const code = fs.readFileSync(snippetPath, `utf8`).trim() + let code = fs.readFileSync(snippetPath, `utf8`).trim() + if (lines.length) { + code = code + .split(`\n`) + .filter((_, lineNumber) => lines.includes(lineNumber + 1)) + .join(`\n`) + } // PrismJS's theme styles are targeting pre[class*="language-"] // to apply its styles. We do the same here so that users @@ -60,7 +81,7 @@ module.exports = ({ markdownAST, markdownNode }, { directory } = {}) => { // outcome without any additional CSS. // // @see https://github.com/PrismJS/prism/blob/1d5047df37aacc900f8270b1c6215028f6988eb1/themes/prism.css#L49-L54 - const language = getLanguage(file) + const language = getLanguage(snippetPath) // Change the node type to code, insert our file as value and set language. node.type = `code` diff --git a/packages/gatsby-source-drupal/CHANGELOG.md b/packages/gatsby-source-drupal/CHANGELOG.md index fbd96ff044527..4d4deb18e7461 100644 --- a/packages/gatsby-source-drupal/CHANGELOG.md +++ b/packages/gatsby-source-drupal/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [3.4.3](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-drupal@3.4.2...gatsby-source-drupal@3.4.3) (2020-04-10) + +### Bug Fixes + +- **gatsby-source-drupal:** Verify nodes exist before looping through them ([#22898](https://github.com/gatsbyjs/gatsby/issues/22898)) ([cdbe734](https://github.com/gatsbyjs/gatsby/commit/cdbe734)) + ## [3.4.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-drupal@3.4.1...gatsby-source-drupal@3.4.2) (2020-03-23) **Note:** Version bump only for package gatsby-source-drupal diff --git a/packages/gatsby-source-drupal/package.json b/packages/gatsby-source-drupal/package.json index 8416ff92bfce2..77c3868d5ffac 100644 --- a/packages/gatsby-source-drupal/package.json +++ b/packages/gatsby-source-drupal/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-source-drupal", "description": "Gatsby source plugin for building websites using the Drupal CMS as a data source", - "version": "3.4.2", + "version": "3.4.3", "author": "Kyle Mathews ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-source-drupal/src/utils.js b/packages/gatsby-source-drupal/src/utils.js index 44cf6e110d66a..ba8020c98c783 100644 --- a/packages/gatsby-source-drupal/src/utils.js +++ b/packages/gatsby-source-drupal/src/utils.js @@ -147,10 +147,10 @@ const handleWebhookUpdate = async ( nodesToUpdate.push(...addedReferencedNodes) } else { // if we are inserting new node, we need to update all referenced nodes - const newNodeReferencedNodes = referencedNodesLookup - .get(newNode) - .map(id => getNode(id)) - nodesToUpdate.push(...newNodeReferencedNodes) + const newNodes = referencedNodesLookup.get(newNode) + if (typeof newNodes !== `undefined`) { + newNodes.forEach(id => nodesToUpdate.push(getNode(id))) + } } // download file diff --git a/packages/gatsby-source-graphql/CHANGELOG.md b/packages/gatsby-source-graphql/CHANGELOG.md index 2cac19ef3d781..c2cd2581bccb6 100644 --- a/packages/gatsby-source-graphql/CHANGELOG.md +++ b/packages/gatsby-source-graphql/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
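Note on the gatsby-source-drupal fix above: the referenced-nodes lookup returns `undefined` for a node that has no entry yet, so the old `.get(newNode).map(...)` chain could throw during webhook inserts. A minimal sketch of the failure mode and the guard; the lookup and `getNode` here are stand-ins, not the plugin's actual internals:

```js
// Stand-ins for gatsby-source-drupal internals, to show why the guard is needed.
const referencedNodesLookup = new Map()
const nodesToUpdate = []
const getNode = id => ({ id })

const newNode = { id: `brand-new-node` }
const newNodes = referencedNodesLookup.get(newNode) // undefined: no entry yet

// The old code called newNodes.map(...) directly, throwing a TypeError here.
if (typeof newNodes !== `undefined`) {
  newNodes.forEach(id => nodesToUpdate.push(getNode(id)))
}
```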
+## [2.3.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-graphql@2.3.1...gatsby-source-graphql@2.3.2) (2020-04-06) + +### Bug Fixes + +- **gatsby-source-graphql:** Convert ts to plain js until better times ([#22848](https://github.com/gatsbyjs/gatsby/issues/22848)) ([ad945ec](https://github.com/gatsbyjs/gatsby/commit/ad945ec)) + ## [2.3.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-source-graphql@2.3.0...gatsby-source-graphql@2.3.1) (2020-04-03) **Note:** Version bump only for package gatsby-source-graphql diff --git a/packages/gatsby-source-graphql/package.json b/packages/gatsby-source-graphql/package.json index 8f33d09126200..1345e4e467375 100644 --- a/packages/gatsby-source-graphql/package.json +++ b/packages/gatsby-source-graphql/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-source-graphql", "description": "Gatsby plugin which adds a third-party GraphQL API to Gatsby GraphQL", - "version": "2.3.1", + "version": "2.3.2", "author": "Mikhail Novikov ", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-source-graphql/src/batching/__tests__/dataloader-link.ts b/packages/gatsby-source-graphql/src/batching/__tests__/dataloader-link.js similarity index 89% rename from packages/gatsby-source-graphql/src/batching/__tests__/dataloader-link.ts rename to packages/gatsby-source-graphql/src/batching/__tests__/dataloader-link.js index fa025e36c09fb..a60879a366ee4 100644 --- a/packages/gatsby-source-graphql/src/batching/__tests__/dataloader-link.ts +++ b/packages/gatsby-source-graphql/src/batching/__tests__/dataloader-link.js @@ -1,14 +1,13 @@ -import { parse } from "graphql" -import { execute } from "apollo-link" -import { createDataloaderLink } from "../dataloader-link" +const { parse } = require(`graphql`) +const { execute } = require(`apollo-link`) +const { createDataloaderLink } = require(`../dataloader-link`) const sampleQuery = parse(`{ foo }`) const expectedSampleQueryResult = { data: { foo: `bar` } } -// eslint-disable-next-line @typescript-eslint/camelcase const fetchResult = { data: { gatsby0_foo: `bar` } } -const makeFetch = (expectedResult: any = fetchResult): jest.Mock => +const makeFetch = (expectedResult = fetchResult) => jest.fn(() => Promise.resolve({ json: () => Promise.resolve(expectedResult), @@ -23,7 +22,7 @@ describe(`createDataloaderLink`, () => { }) const observable = execute(link, { query: sampleQuery }) observable.subscribe({ - next: (result: any) => { + next: result => { expect(result).toEqual(expectedSampleQueryResult) done() }, diff --git a/packages/gatsby-source-graphql/src/batching/__tests__/merge-queries.ts b/packages/gatsby-source-graphql/src/batching/__tests__/merge-queries.js similarity index 95% rename from packages/gatsby-source-graphql/src/batching/__tests__/merge-queries.ts rename to packages/gatsby-source-graphql/src/batching/__tests__/merge-queries.js index bcf6654c849f0..faa52143c0ebf 100644 --- a/packages/gatsby-source-graphql/src/batching/__tests__/merge-queries.ts +++ b/packages/gatsby-source-graphql/src/batching/__tests__/merge-queries.js @@ -1,5 +1,5 @@ -import { print, parse } from "graphql" -import { IQuery, merge, resolveResult } from "../merge-queries" +const { print, parse } = require(`graphql`) +const { merge, resolveResult } = require(`../merge-queries`) describe(`Query merging`, () => { it(`merges simple queries`, () => { @@ -213,7 +213,7 @@ describe(`Resolving merged query results`, () => { }) it(`throws on unexpected results`, () => { - const shouldThrow = (): void => { + const 
shouldThrow = () => { resolveResult({ data: { gatsby0_foo: `foo`, @@ -225,10 +225,8 @@ describe(`Resolving merged query results`, () => { }) }) -type QueryFixture = [string, object] - -function fromFixtures(fixtures: QueryFixture[]): IQuery[] { - return fixtures.map(([query, variables]: QueryFixture) => { +function fromFixtures(fixtures) { + return fixtures.map(([query, variables]) => { return { query: parse(query), variables, diff --git a/packages/gatsby-source-graphql/src/batching/dataloader-link.ts b/packages/gatsby-source-graphql/src/batching/dataloader-link.js similarity index 67% rename from packages/gatsby-source-graphql/src/batching/dataloader-link.ts rename to packages/gatsby-source-graphql/src/batching/dataloader-link.js index 87895e3ace8df..8967befce06ae 100644 --- a/packages/gatsby-source-graphql/src/batching/dataloader-link.ts +++ b/packages/gatsby-source-graphql/src/batching/dataloader-link.js @@ -1,22 +1,14 @@ -import DataLoader from "dataloader" -import { ApolloLink, Observable, Operation, FetchResult } from "apollo-link" -import { print } from "graphql" -import { IQuery, IQueryResult, merge, resolveResult } from "./merge-queries" +const DataLoader = require(`dataloader`) +const { ApolloLink, Observable } = require(`apollo-link`) +const { print } = require(`graphql`) +const { merge, resolveResult } = require(`./merge-queries`) -interface IOptions { - uri: string - fetch: Function - fetchOptions?: object - dataLoaderOptions?: object - headers?: object -} - -export function createDataloaderLink(options: IOptions): ApolloLink { - const load = async (keys: ReadonlyArray): Promise => { +export function createDataloaderLink(options) { + const load = async keys => { const query = merge(keys) - const result: object = await request(query, options) + const result = await request(query, options) if (!isValidGraphQLResult(result)) { - const error: any = new Error( + const error = new Error( `Failed to load query batch:\n${formatErrors(result)}` ) error.name = `GraphQLError` @@ -34,12 +26,12 @@ export function createDataloaderLink(options: IOptions): ApolloLink { const dataloader = new DataLoader(load, { cache: false, maxBatchSize, - batchScheduleFn: (callback): any => setTimeout(callback, 50), + batchScheduleFn: callback => setTimeout(callback, 50), ...options.dataLoaderOptions, }) return new ApolloLink( - (operation: Operation): Observable => + operation => new Observable(observer => { const { query, variables } = operation @@ -61,7 +53,7 @@ export function createDataloaderLink(options: IOptions): ApolloLink { ) } -function formatErrors(result: any): string { +function formatErrors(result) { if (result?.errors?.length > 0) { return result.errors .map(error => { @@ -75,7 +67,7 @@ function formatErrors(result: any): string { return `Unexpected GraphQL result` } -function isValidGraphQLResult(response): response is IQueryResult { +function isValidGraphQLResult(response) { return ( response && response.data && @@ -83,7 +75,7 @@ function isValidGraphQLResult(response): response is IQueryResult { ) } -async function request(query: IQuery, options: IOptions): Promise { +async function request(query, options) { const { uri, headers = {}, fetch, fetchOptions } = options const body = JSON.stringify({ diff --git a/packages/gatsby-source-graphql/src/batching/merge-queries.ts b/packages/gatsby-source-graphql/src/batching/merge-queries.js similarity index 68% rename from packages/gatsby-source-graphql/src/batching/merge-queries.ts rename to 
packages/gatsby-source-graphql/src/batching/merge-queries.js index 3f1229b836792..85474b9cbf2e1 100644 --- a/packages/gatsby-source-graphql/src/batching/merge-queries.ts +++ b/packages/gatsby-source-graphql/src/batching/merge-queries.js @@ -1,35 +1,9 @@ -import { - visit, - visitInParallel, - Kind, - DocumentNode, - VariableNode, - SelectionNode, - FragmentSpreadNode, - FragmentDefinitionNode, - InlineFragmentNode, - FieldNode, - NameNode, - OperationDefinitionNode, - Visitor, - ASTKindToNode, - VariableDefinitionNode, - DirectiveNode, -} from "graphql" -import _ from "lodash" - -export interface IQuery { - query: DocumentNode - variables: object -} - -export interface IQueryResult { - data: object -} +const { visit, visitInParallel, Kind } = require(`graphql`) +const _ = require(`lodash`) const Prefix = { - create: (index: number): string => `gatsby${index}_`, - parseKey: (prefixedKey: string): { index: number; originalKey: string } => { + create: index => `gatsby${index}_`, + parseKey: prefixedKey => { const match = /^gatsby([\d]+)_(.*)$/.exec(prefixedKey) if (!match || match.length !== 3 || isNaN(Number(match[1])) || !match[2]) { throw new Error(`Unexpected data key: ${prefixedKey}`) @@ -74,13 +48,13 @@ const Prefix = { * } * fragment FooQuery on Query { baz } */ -export function merge(queries: ReadonlyArray): IQuery { - const mergedVariables: object = {} - const mergedVariableDefinitions: VariableDefinitionNode[] = [] - const mergedSelections: SelectionNode[] = [] - const mergedFragmentMap: Map = new Map() +export function merge(queries) { + const mergedVariables = {} + const mergedVariableDefinitions = [] + const mergedSelections = [] + const mergedFragmentMap = new Map() - queries.forEach((query: IQuery, index: number) => { + queries.forEach((query, index) => { const prefixedQuery = prefixQueryParts(Prefix.create(index), query) prefixedQuery.query.definitions.forEach(def => { @@ -99,7 +73,7 @@ export function merge(queries: ReadonlyArray): IQuery { Object.assign(mergedVariables, prefixedQuery.variables) }) - const mergedQueryDefinition: OperationDefinitionNode = { + const mergedQueryDefinition = { kind: Kind.OPERATION_DEFINITION, operation: `query`, variableDefinitions: mergedVariableDefinitions, @@ -121,72 +95,60 @@ export function merge(queries: ReadonlyArray): IQuery { /** * Split and transform result of the query produced by the `merge` function */ -export function resolveResult(mergedQueryResult: IQueryResult): IQueryResult[] { +export function resolveResult(mergedQueryResult) { const data = mergedQueryResult.data - return Object.keys(data).reduce( - (acc: IQueryResult[], prefixedKey: string): IQueryResult[] => { - const { index, originalKey } = Prefix.parseKey(prefixedKey) - if (!acc[index]) acc[index] = { data: {} } - acc[index].data[originalKey] = data[prefixedKey] - return acc - }, - [] - ) + return Object.keys(data).reduce((acc, prefixedKey) => { + const { index, originalKey } = Prefix.parseKey(prefixedKey) + if (!acc[index]) acc[index] = { data: {} } + acc[index].data[originalKey] = data[prefixedKey] + return acc + }, []) } const Visitors = { - detectFragmentsWithVariables: ( - fragmentsWithVariables: Set - ): Visitor => { + detectFragmentsWithVariables: fragmentsWithVariables => { let currentFragmentName return { [Kind.FRAGMENT_DEFINITION]: { - enter: (def: FragmentDefinitionNode): void => { + enter: def => { currentFragmentName = def.name.value }, - leave: (): void => { + leave: () => { currentFragmentName = null }, }, - [Kind.VARIABLE]: (): void => { + [Kind.VARIABLE]: 
() => { if (currentFragmentName) { fragmentsWithVariables.add(currentFragmentName) } }, } }, - prefixVariables: (prefix: string): Visitor => { + prefixVariables: prefix => { return { - [Kind.VARIABLE]: (variable: VariableNode): VariableNode => - prefixNodeName(variable, prefix), + [Kind.VARIABLE]: variable => prefixNodeName(variable, prefix), } }, - prefixFragmentNames: ( - prefix: string, - fragmentNames: Set - ): Visitor => { + prefixFragmentNames: (prefix, fragmentNames) => { return { - [Kind.FRAGMENT_DEFINITION]: ( - def: FragmentDefinitionNode - ): FragmentDefinitionNode | void => + [Kind.FRAGMENT_DEFINITION]: def => fragmentNames.has(def.name.value) ? prefixNodeName(def, prefix) : def, - - [Kind.FRAGMENT_SPREAD]: (def: FragmentSpreadNode): FragmentSpreadNode => + [Kind.FRAGMENT_SPREAD]: def => fragmentNames.has(def.name.value) ? prefixNodeName(def, prefix) : def, } }, } -function prefixQueryParts(prefix: string, query: IQuery): IQuery { - let document: DocumentNode = aliasTopLevelFields(prefix, query.query) +function prefixQueryParts(prefix, query) { + let document = aliasTopLevelFields(prefix, query.query) const variableNames = Object.keys(query.variables) if (variableNames.length === 0) { return { ...query, query: document } } - const fragmentsWithVariables: Set = new Set() + const fragmentsWithVariables = new Set() document = visit( document, @@ -211,7 +173,7 @@ function prefixQueryParts(prefix: string, query: IQuery): IQuery { [Kind.INLINE_FRAGMENT]: [`selectionSet`], [Kind.FIELD]: [`selectionSet`], [Kind.SELECTION_SET]: [`selections`], - } as any + } ) } @@ -231,9 +193,9 @@ function prefixQueryParts(prefix: string, query: IQuery): IQuery { * * @see aliasFieldsInSelection for implementation details */ -function aliasTopLevelFields(prefix: string, doc: DocumentNode): DocumentNode { +function aliasTopLevelFields(prefix, doc) { const transformer = { - [Kind.OPERATION_DEFINITION]: (def): OperationDefinitionNode => { + [Kind.OPERATION_DEFINITION]: def => { const { selections } = def.selectionSet return { ...def, @@ -244,7 +206,7 @@ function aliasTopLevelFields(prefix: string, doc: DocumentNode): DocumentNode { } }, } - return visit(doc, transformer, { [Kind.DOCUMENT]: [`definitions`] } as any) + return visit(doc, transformer, { [Kind.DOCUMENT]: [`definitions`] }) } /** @@ -267,26 +229,18 @@ function aliasTopLevelFields(prefix: string, doc: DocumentNode): DocumentNode { * ... 
on Query { gatsby1_bar: bar } * } */ -function aliasFieldsInSelection( - prefix: string, - selections: ReadonlyArray, - document: DocumentNode -): SelectionNode[] { - return _.flatMap(selections, (selection: SelectionNode): SelectionNode[] => { +function aliasFieldsInSelection(prefix, selections, document) { + return _.flatMap(selections, selection => { switch (selection.kind) { case Kind.INLINE_FRAGMENT: return [aliasFieldsInInlineFragment(prefix, selection, document)] - case Kind.FRAGMENT_SPREAD: { const inlineFragment = inlineFragmentSpread(selection, document) return [ addSkipDirective(selection), aliasFieldsInInlineFragment(prefix, inlineFragment, document), - // Keep original spread in selection with @skip(if: true) - // otherwise if this was a single fragment usage the query will fail validation ] } - case Kind.FIELD: default: return [aliasField(selection, prefix)] @@ -294,12 +248,8 @@ function aliasFieldsInSelection( }) } -interface INodeWithDirectives { - readonly directives?: ReadonlyArray -} - -function addSkipDirective(node: T): T { - const skipDirective: DirectiveNode = { +function addSkipDirective(node) { + const skipDirective = { kind: Kind.DIRECTIVE, name: { kind: Kind.NAME, value: `skip` }, arguments: [ @@ -325,11 +275,7 @@ function addSkipDirective(node: T): T { * To * ... on Query { gatsby1_foo: foo, ... on Query { gatsby1_bar: foo } } */ -function aliasFieldsInInlineFragment( - prefix: string, - fragment: InlineFragmentNode, - document: DocumentNode -): InlineFragmentNode { +function aliasFieldsInInlineFragment(prefix, fragment, document) { const { selections } = fragment.selectionSet return { ...fragment, @@ -350,10 +296,7 @@ function aliasFieldsInInlineFragment( * Transforms to: * query { ... on Query { bar } } */ -function inlineFragmentSpread( - spread: FragmentSpreadNode, - document: DocumentNode -): InlineFragmentNode { +function inlineFragmentSpread(spread, document) { const fragment = document.definitions.find( def => def.kind === Kind.FRAGMENT_DEFINITION && @@ -362,8 +305,7 @@ function inlineFragmentSpread( if (!fragment) { throw new Error(`Fragment ${spread.name.value} does not exist`) } - const { typeCondition, selectionSet } = fragment as FragmentDefinitionNode - + const { typeCondition, selectionSet } = fragment return { kind: Kind.INLINE_FRAGMENT, typeCondition, @@ -372,10 +314,7 @@ function inlineFragmentSpread( } } -function prefixNodeName( - namedNode: T, - prefix: string -): T { +function prefixNodeName(namedNode, prefix) { return { ...namedNode, name: { @@ -392,7 +331,7 @@ function prefixNodeName( * { foo } -> { gatsby1_foo: foo } * { foo: bar } -> { gatsby1_foo: bar } */ -function aliasField(field: FieldNode, aliasPrefix: string): FieldNode { +function aliasField(field, aliasPrefix) { const aliasNode = field.alias ? 
field.alias : field.name return { ...field, @@ -403,10 +342,10 @@ function aliasField(field: FieldNode, aliasPrefix: string): FieldNode { } } -function isQueryDefinition(def): def is OperationDefinitionNode { +function isQueryDefinition(def) { return def.kind === Kind.OPERATION_DEFINITION && def.operation === `query` } -function isFragmentDefinition(def): def is FragmentDefinitionNode { +function isFragmentDefinition(def) { return def.kind === Kind.FRAGMENT_DEFINITION } diff --git a/packages/gatsby-theme-blog-core/CHANGELOG.md b/packages/gatsby-theme-blog-core/CHANGELOG.md index 4b91215666005..3f07efde41a98 100644 --- a/packages/gatsby-theme-blog-core/CHANGELOG.md +++ b/packages/gatsby-theme-blog-core/CHANGELOG.md @@ -3,6 +3,34 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.3.20](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.19...gatsby-theme-blog-core@1.3.20) (2020-04-11) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.19](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.18...gatsby-theme-blog-core@1.3.19) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.18](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.17...gatsby-theme-blog-core@1.3.18) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.17](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.16...gatsby-theme-blog-core@1.3.17) (2020-04-09) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.16](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.15...gatsby-theme-blog-core@1.3.16) (2020-04-08) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.15](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.14...gatsby-theme-blog-core@1.3.15) (2020-04-07) + +**Note:** Version bump only for package gatsby-theme-blog-core + +## [1.3.14](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.13...gatsby-theme-blog-core@1.3.14) (2020-04-07) + +**Note:** Version bump only for package gatsby-theme-blog-core + ## [1.3.13](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog-core@1.3.12...gatsby-theme-blog-core@1.3.13) (2020-04-04) **Note:** Version bump only for package gatsby-theme-blog-core diff --git a/packages/gatsby-theme-blog-core/package.json b/packages/gatsby-theme-blog-core/package.json index 662fd8fd6fdf4..c18426ca29a83 100644 --- a/packages/gatsby-theme-blog-core/package.json +++ b/packages/gatsby-theme-blog-core/package.json @@ -1,6 +1,6 @@ { "name": "gatsby-theme-blog-core", - "version": "1.3.13", + "version": "1.3.20", "main": "index.js", "author": "christopherbiscardi (@chrisbiscardi)", "license": "MIT", @@ -19,7 +19,7 @@ "dependencies": { "@mdx-js/mdx": "^1.5.7", "gatsby-core-utils": "^1.1.1", - "gatsby-plugin-mdx": "^1.1.4", + "gatsby-plugin-mdx": "^1.1.7", "gatsby-plugin-sharp": "^2.5.4", "gatsby-remark-copy-linked-files": "^2.2.1", "gatsby-remark-images": "^3.2.2", @@ -30,7 +30,7 @@ }, "devDependencies": { "@mdx-js/react": "^1.5.7", - "gatsby": "^2.20.12", + "gatsby": "^2.20.18", "prettier": "^1.19.1", "react": "^16.12.0", "react-dom": "^16.12.0" diff --git a/packages/gatsby-theme-blog/CHANGELOG.md b/packages/gatsby-theme-blog/CHANGELOG.md index 31f73cb2c1835..ec895f4c7a596 100644 --- 
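The batching hunks above (`dataloader-link.js` and `merge-queries.js`) implement query batching by aliasing every top-level field of the i-th query with a `gatsby<i>_` prefix, sending one merged request, and routing each prefixed key back to its originating query. Below is a minimal sketch of the result-splitting half, mirroring `resolveResult` and `Prefix.parseKey` from the diff; it works on plain result objects rather than GraphQL ASTs, so it is an illustration of the aliasing scheme, not the actual module.

```typescript
// Simplified sketch: the real merge step rewrites the parsed GraphQL AST with `visit`;
// this only shows how prefixed keys are routed back to the original queries.
interface IQueryResult {
  data: Record<string, unknown>
}

function splitMergedResult(merged: IQueryResult): IQueryResult[] {
  return Object.keys(merged.data).reduce((acc: IQueryResult[], prefixedKey) => {
    const match = /^gatsby(\d+)_(.*)$/.exec(prefixedKey)
    if (!match) {
      throw new Error(`Unexpected data key: ${prefixedKey}`)
    }
    const index = Number(match[1])
    const originalKey = match[2]
    if (!acc[index]) acc[index] = { data: {} }
    acc[index].data[originalKey] = merged.data[prefixedKey]
    return acc
  }, [])
}

// { data: { gatsby0_foo: "bar", gatsby1_foo: "baz" } }
// -> [{ data: { foo: "bar" } }, { data: { foo: "baz" } }]
```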
a/packages/gatsby-theme-blog/CHANGELOG.md +++ b/packages/gatsby-theme-blog/CHANGELOG.md @@ -3,6 +3,34 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.4.20](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.19...gatsby-theme-blog@1.4.20) (2020-04-11) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.19](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.18...gatsby-theme-blog@1.4.19) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.18](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.17...gatsby-theme-blog@1.4.18) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.17](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.16...gatsby-theme-blog@1.4.17) (2020-04-09) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.16](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.15...gatsby-theme-blog@1.4.16) (2020-04-08) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.15](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.14...gatsby-theme-blog@1.4.15) (2020-04-07) + +**Note:** Version bump only for package gatsby-theme-blog + +## [1.4.14](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.13...gatsby-theme-blog@1.4.14) (2020-04-07) + +**Note:** Version bump only for package gatsby-theme-blog + ## [1.4.13](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-blog@1.4.12...gatsby-theme-blog@1.4.13) (2020-04-04) **Note:** Version bump only for package gatsby-theme-blog diff --git a/packages/gatsby-theme-blog/package.json b/packages/gatsby-theme-blog/package.json index 841875cabdb5a..a3c66106f8371 100644 --- a/packages/gatsby-theme-blog/package.json +++ b/packages/gatsby-theme-blog/package.json @@ -1,6 +1,6 @@ { "name": "gatsby-theme-blog", - "version": "1.4.13", + "version": "1.4.20", "description": "A Gatsby theme for miscellaneous blogging with a dark/light mode", "main": "index.js", "keywords": [ @@ -23,13 +23,13 @@ "@theme-ui/prism": "^0.2.50", "@theme-ui/typography": "^0.2.46", "deepmerge": "^4.2.2", - "gatsby-image": "^2.3.1", + "gatsby-image": "^2.3.2", "gatsby-plugin-emotion": "^4.2.1", "gatsby-plugin-feed": "^2.4.1", - "gatsby-plugin-react-helmet": "^3.2.1", + "gatsby-plugin-react-helmet": "^3.2.2", "gatsby-plugin-theme-ui": "^0.2.53", - "gatsby-plugin-twitter": "^2.2.1", - "gatsby-theme-blog-core": "^1.3.13", + "gatsby-plugin-twitter": "^2.2.2", + "gatsby-theme-blog-core": "^1.3.20", "mdx-utils": "0.2.0", "react-helmet": "^5.2.1", "react-switch": "^5.0.1", @@ -39,7 +39,7 @@ "typography-theme-wordpress-2016": "^0.16.19" }, "devDependencies": { - "gatsby": "^2.20.12", + "gatsby": "^2.20.18", "prettier": "^1.19.1", "react": "^16.12.0", "react-dom": "^16.12.0" diff --git a/packages/gatsby-theme-notes/CHANGELOG.md b/packages/gatsby-theme-notes/CHANGELOG.md index f87cd588a59bd..0917781b03da1 100644 --- a/packages/gatsby-theme-notes/CHANGELOG.md +++ b/packages/gatsby-theme-notes/CHANGELOG.md @@ -3,6 +3,34 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [1.2.19](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.18...gatsby-theme-notes@1.2.19) (2020-04-11) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.18](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.17...gatsby-theme-notes@1.2.18) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.17](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.16...gatsby-theme-notes@1.2.17) (2020-04-10) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.16](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.15...gatsby-theme-notes@1.2.16) (2020-04-09) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.15](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.14...gatsby-theme-notes@1.2.15) (2020-04-08) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.14](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.13...gatsby-theme-notes@1.2.14) (2020-04-07) + +**Note:** Version bump only for package gatsby-theme-notes + +## [1.2.13](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.12...gatsby-theme-notes@1.2.13) (2020-04-07) + +**Note:** Version bump only for package gatsby-theme-notes + ## [1.2.12](https://github.com/gatsbyjs/gatsby/compare/gatsby-theme-notes@1.2.11...gatsby-theme-notes@1.2.12) (2020-04-03) **Note:** Version bump only for package gatsby-theme-notes diff --git a/packages/gatsby-theme-notes/package.json b/packages/gatsby-theme-notes/package.json index 2269d7dfcfcc2..41e591c308f7c 100644 --- a/packages/gatsby-theme-notes/package.json +++ b/packages/gatsby-theme-notes/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-theme-notes", "description": "Gatsby Theme for adding a notes section to your website", - "version": "1.2.12", + "version": "1.2.19", "author": "John Otander", "license": "MIT", "main": "index.js", @@ -20,7 +20,7 @@ }, "homepage": "https://github.com/gatsbyjs/gatsby/tree/master/packages/gatsby-theme-notes#readme", "devDependencies": { - "gatsby": "^2.20.12", + "gatsby": "^2.20.18", "react": "^16.12.0", "react-dom": "^16.12.0" }, @@ -39,7 +39,7 @@ "gatsby-core-utils": "^1.1.1", "gatsby-plugin-compile-es6-packages": "^2.1.0", "gatsby-plugin-emotion": "^4.2.1", - "gatsby-plugin-mdx": "^1.1.4", + "gatsby-plugin-mdx": "^1.1.7", "gatsby-plugin-meta-redirect": "^1.1.1", "gatsby-plugin-og-image": "0.0.1", "gatsby-plugin-redirects": "^1.0.0", diff --git a/packages/gatsby-transformer-documentationjs/CHANGELOG.md b/packages/gatsby-transformer-documentationjs/CHANGELOG.md index 56f5d57f6acd8..408e701ce7a84 100644 --- a/packages/gatsby-transformer-documentationjs/CHANGELOG.md +++ b/packages/gatsby-transformer-documentationjs/CHANGELOG.md @@ -3,6 +3,12 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [4.2.2](https://github.com/gatsbyjs/gatsby/compare/gatsby-transformer-documentationjs@4.2.1...gatsby-transformer-documentationjs@4.2.2) (2020-04-10) + +### Bug Fixes + +- create unique Nodes ([#22774](https://github.com/gatsbyjs/gatsby/issues/22774)) ([1381a0b](https://github.com/gatsbyjs/gatsby/commit/1381a0b)) + ## [4.2.1](https://github.com/gatsbyjs/gatsby/compare/gatsby-transformer-documentationjs@4.2.0...gatsby-transformer-documentationjs@4.2.1) (2020-03-23) **Note:** Version bump only for package gatsby-transformer-documentationjs diff --git a/packages/gatsby-transformer-documentationjs/package.json b/packages/gatsby-transformer-documentationjs/package.json index 5bbbf2db25b74..42a5a9c675150 100644 --- a/packages/gatsby-transformer-documentationjs/package.json +++ b/packages/gatsby-transformer-documentationjs/package.json @@ -1,7 +1,7 @@ { "name": "gatsby-transformer-documentationjs", "description": "Gatsby transformer plugin which uses Documentation.js to extract JavaScript documentation", - "version": "4.2.1", + "version": "4.2.2", "author": "Kyle Mathews", "bugs": { "url": "https://github.com/gatsbyjs/gatsby/issues" diff --git a/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap b/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap index 105d9376297cd..8c960d06742fe 100644 --- a/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap +++ b/packages/gatsby-transformer-documentationjs/src/__tests__/__snapshots__/gatsby-node.js.snap @@ -21,11 +21,11 @@ Object { exports[`transformer-react-doc-gen: onCreateNode Complex example should handle typedefs should handle type applications 1`] = ` Object { "children": Array [ - "documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", + "documentationJS documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"}] line 12 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", ], "commentNumber": null, - "description___NODE": "documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", - "id": "documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3", + "description___NODE": "documentationJS documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"}] line 12 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3--DocumentationJSComponentDescription--comment.description", + "id": "documentationJS documentationJS node_1 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"}] line 12 path #[{\\"name\\":\\"ObjectType\\",\\"kind\\":\\"typedef\\"},{\\"fieldName\\":\\"properties\\",\\"fieldIndex\\":0}] line 3", "internal": Object { "contentDigest": "content-digest", "type": "DocumentationJs", diff --git a/packages/gatsby-transformer-documentationjs/src/gatsby-node.js b/packages/gatsby-transformer-documentationjs/src/gatsby-node.js index a45bddd7bc9c8..d3e34aaf4323f 100644 --- 
a/packages/gatsby-transformer-documentationjs/src/gatsby-node.js +++ b/packages/gatsby-transformer-documentationjs/src/gatsby-node.js @@ -274,7 +274,7 @@ exports.onCreateNode = async ({ node, actions, ...helpers }) => { const docSkeletonNode = { commentNumber, level, - id: createNodeId(docId(node.id, docsJson)), + id: createNodeId(docId(parent, docsJson)), parent, children: [], internal: { diff --git a/packages/gatsby/CHANGELOG.md b/packages/gatsby/CHANGELOG.md index d3d338e85a20d..780b7da8fad84 100644 --- a/packages/gatsby/CHANGELOG.md +++ b/packages/gatsby/CHANGELOG.md @@ -3,6 +3,38 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [2.20.18](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.17...gatsby@2.20.18) (2020-04-11) + +### Bug Fixes + +- **gatsby:** Use `moveSync` over `renameSync` to fix cross mount cases ([#23029](https://github.com/gatsbyjs/gatsby/issues/23029)) ([96f8d4b](https://github.com/gatsbyjs/gatsby/commit/96f8d4b)) + +## [2.20.17](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.16...gatsby@2.20.17) (2020-04-10) + +**Note:** Version bump only for package gatsby + +## [2.20.16](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.15...gatsby@2.20.16) (2020-04-10) + +### Bug Fixes + +- **gatsby:** Use tmp dir for tmp redux cache folder ([#22959](https://github.com/gatsbyjs/gatsby/issues/22959)) ([86cf920](https://github.com/gatsbyjs/gatsby/commit/86cf920)) + +## [2.20.15](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.14...gatsby@2.20.15) (2020-04-09) + +### Bug Fixes + +- **gatsby:** improve async commons chunking ([#22879](https://github.com/gatsbyjs/gatsby/issues/22879)) ([7cf056f](https://github.com/gatsbyjs/gatsby/commit/7cf056f)) + +## [2.20.14](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.13...gatsby@2.20.14) (2020-04-08) + +### Performance Improvements + +- **gatsby:** support `elemMatch` as fast filter ([#22742](https://github.com/gatsbyjs/gatsby/issues/22742)) ([66b3d35](https://github.com/gatsbyjs/gatsby/commit/66b3d35)) + +## [2.20.13](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.12...gatsby@2.20.13) (2020-04-07) + +**Note:** Version bump only for package gatsby + ## [2.20.12](https://github.com/gatsbyjs/gatsby/compare/gatsby@2.20.11...gatsby@2.20.12) (2020-04-03) ### Bug Fixes diff --git a/packages/gatsby/index.d.ts b/packages/gatsby/index.d.ts index 020f0c8fa9a50..e5919b29a1663 100644 --- a/packages/gatsby/index.d.ts +++ b/packages/gatsby/index.d.ts @@ -1,4 +1,5 @@ import * as React from "react" +import { Renderer } from "react-dom" import { EventEmitter } from "events" import { WindowLocation, NavigateFn } from "@reach/router" import { createContentDigest } from "gatsby-core-utils" @@ -537,7 +538,7 @@ export interface GatsbyBrowser { args: ReplaceComponentRendererArgs, options: PluginOptions ): any - replaceHydrateFunction?(args: BrowserPluginArgs, options: PluginOptions): any + replaceHydrateFunction?(args: BrowserPluginArgs, options: PluginOptions): Renderer shouldUpdateScroll?(args: ShouldUpdateScrollArgs, options: PluginOptions): any wrapPageElement?( args: WrapPageElementBrowserArgs, @@ -959,7 +960,13 @@ interface ActionOptions { } export interface BuildArgs extends ParentSpanPluginArgs { - graphql: Function + graphql( + query: string, + variables?: TVariables + ): Promise<{ + errors?: any + data?: TData + }> } export interface Actions { diff --git a/packages/gatsby/package.json 
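The `index.d.ts` hunk above makes `BuildArgs.graphql` generic over the query result and types `replaceHydrateFunction` as a `react-dom` `Renderer`. A hedged sketch of what the typed `graphql` call enables in `gatsby-node`; the `BlogQuery` shape, field names, and template path are hypothetical, only `BuildArgs` and the `graphql` signature come from the typings above.

```typescript
// Hypothetical query shape; the point is that `result.data` is now typed instead of `any`.
import { BuildArgs } from "gatsby"

interface BlogQuery {
  allMarkdownRemark: { nodes: Array<{ id: string; fields: { slug: string } }> }
}

export async function createPages({ graphql, actions }: BuildArgs): Promise<void> {
  const result = await graphql<BlogQuery>(`
    {
      allMarkdownRemark {
        nodes {
          id
          fields {
            slug
          }
        }
      }
    }
  `)
  if (result.errors) {
    throw result.errors
  }
  result.data?.allMarkdownRemark.nodes.forEach(node => {
    actions.createPage({
      path: node.fields.slug,
      component: require.resolve(`./src/templates/post.js`),
      context: { id: node.id },
    })
  })
}
```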
b/packages/gatsby/package.json index 3251952b30731..74c156f1cb82f 100644 --- a/packages/gatsby/package.json +++ b/packages/gatsby/package.json @@ -1,7 +1,7 @@ { "name": "gatsby", "description": "Blazing fast modern site generator for React", - "version": "2.20.12", + "version": "2.20.18", "author": "Kyle Mathews ", "bin": { "gatsby": "./dist/bin/gatsby.js" @@ -71,7 +71,7 @@ "flat": "^4.1.0", "fs-exists-cached": "1.0.0", "fs-extra": "^8.1.0", - "gatsby-cli": "^2.11.5", + "gatsby-cli": "^2.11.7", "gatsby-core-utils": "^1.1.1", "gatsby-graphiql-explorer": "^0.3.1", "gatsby-link": "^2.3.2", diff --git a/packages/gatsby/src/bootstrap/__tests__/__snapshots__/graphql-runner.js.snap b/packages/gatsby/src/bootstrap/__tests__/__snapshots__/create-graphql-runner.js.snap similarity index 100% rename from packages/gatsby/src/bootstrap/__tests__/__snapshots__/graphql-runner.js.snap rename to packages/gatsby/src/bootstrap/__tests__/__snapshots__/create-graphql-runner.js.snap diff --git a/packages/gatsby/src/bootstrap/__tests__/graphql-runner.js b/packages/gatsby/src/bootstrap/__tests__/create-graphql-runner.js similarity index 88% rename from packages/gatsby/src/bootstrap/__tests__/graphql-runner.js rename to packages/gatsby/src/bootstrap/__tests__/create-graphql-runner.js index e9dba17611223..3445cc43dec90 100644 --- a/packages/gatsby/src/bootstrap/__tests__/graphql-runner.js +++ b/packages/gatsby/src/bootstrap/__tests__/create-graphql-runner.js @@ -1,6 +1,6 @@ jest.mock(`graphql`) -const createGraphqlRunner = require(`../graphql-runner`) +import { createGraphQLRunner } from "../create-graphql-runner" const { execute, validate, parse } = require(`graphql`) parse.mockImplementation(() => { @@ -31,7 +31,7 @@ describe(`grapqhl-runner`, () => { }) it(`should return the result when grapqhl has no errors`, async () => { - const graphqlRunner = createGraphqlRunner(createStore(), reporter) + const graphqlRunner = createGraphQLRunner(createStore(), reporter) const expectation = { data: { @@ -46,7 +46,7 @@ describe(`grapqhl-runner`, () => { }) it(`should return an errors array when structured errors found`, async () => { - const graphqlRunner = createGraphqlRunner(createStore(), reporter) + const graphqlRunner = createGraphQLRunner(createStore(), reporter) const expectation = { errors: [ @@ -64,7 +64,7 @@ describe(`grapqhl-runner`, () => { }) it(`should throw a structured error when created from createPage file`, async () => { - const graphqlRunner = createGraphqlRunner(createStore(), reporter) + const graphqlRunner = createGraphQLRunner(createStore(), reporter) const errorObject = { stack: `Error diff --git a/packages/gatsby/src/bootstrap/__tests__/requires-writer.js b/packages/gatsby/src/bootstrap/__tests__/requires-writer.js index 5506421f45541..b82fa698dc0e6 100644 --- a/packages/gatsby/src/bootstrap/__tests__/requires-writer.js +++ b/packages/gatsby/src/bootstrap/__tests__/requires-writer.js @@ -326,4 +326,29 @@ describe(`requires-writer`, () => { }) }) }) + + describe(`getComponents`, () => { + it(`should return components in a deterministic order`, () => { + const pagesInput = generatePagesState([ + { + component: `component1`, + componentChunkName: `chunkName1`, + matchPath: `matchPath1`, + path: `/path1`, + }, + { + component: `component2`, + componentChunkName: `chunkName2`, + path: `/path2`, + }, + ]) + + const pages = [...pagesInput.values()] + const pagesReversed = [...pagesInput.values()].reverse() + + expect(requiresWriter.getComponents(pages)).toEqual( + requiresWriter.getComponents(pagesReversed) 
+ ) + }) + }) }) diff --git a/packages/gatsby/src/bootstrap/graphql-runner.js b/packages/gatsby/src/bootstrap/create-graphql-runner.ts similarity index 54% rename from packages/gatsby/src/bootstrap/graphql-runner.js rename to packages/gatsby/src/bootstrap/create-graphql-runner.ts index 3d6fb71b2b94d..35967ec130eb1 100644 --- a/packages/gatsby/src/bootstrap/graphql-runner.js +++ b/packages/gatsby/src/bootstrap/create-graphql-runner.ts @@ -1,14 +1,27 @@ -const stackTrace = require(`stack-trace`) +import stackTrace from "stack-trace" +import { ExecutionResultDataDefault } from "graphql/execution/execute" +import { Store } from "redux" -const GraphQLRunner = require(`../query/graphql-runner`).default -const errorParser = require(`../query/error-parser`).default +import GraphQLRunner from "../query/graphql-runner" +import errorParser from "../query/error-parser" +import { emitter } from "../redux" +import { Reporter } from "../.." +import { ExecutionResult, Source } from "../../graphql" +import { IGatsbyState } from "../redux/types" -const { emitter } = require(`../redux`) +type Runner = ( + query: string | Source, + context: Record +) => Promise> -module.exports = (store, reporter) => { +export const createGraphQLRunner = ( + store: Store, + reporter: Reporter +): Runner => { // TODO: Move tracking of changed state inside GraphQLRunner itself. https://github.com/gatsbyjs/gatsby/issues/20941 let runner = new GraphQLRunner(store) - ;[ + + const eventTypes: string[] = [ `DELETE_CACHE`, `CREATE_NODE`, `DELETE_NODE`, @@ -17,12 +30,15 @@ module.exports = (store, reporter) => { `SET_SCHEMA`, `ADD_FIELD_TO_NODE`, `ADD_CHILD_NODE_TO_PARENT_NODE`, - ].forEach(eventType => { - emitter.on(eventType, event => { + ] + + eventTypes.forEach(type => { + emitter.on(type, () => { runner = new GraphQLRunner(store) }) }) - return (query, context) => + + return (query, context): ReturnType => runner.query(query, context).then(result => { if (result.errors) { const structuredErrors = result.errors @@ -30,15 +46,18 @@ module.exports = (store, reporter) => { // Find the file where graphql was called. const file = stackTrace .parse(e) - .find(file => /createPages/.test(file.functionName)) + .find(file => /createPages/.test(file.getFunctionName())) if (file) { const structuredError = errorParser({ message: e.message, location: { - start: { line: file.lineNumber, column: file.columnNumber }, + start: { + line: file.getLineNumber(), + column: file.getColumnNumber(), + }, }, - filePath: file.fileName, + filePath: file.getFileName(), }) structuredError.context = { ...structuredError.context, diff --git a/packages/gatsby/src/bootstrap/index.js b/packages/gatsby/src/bootstrap/index.js index 8bee7678798a5..ffc8fc8eb413e 100644 --- a/packages/gatsby/src/bootstrap/index.js +++ b/packages/gatsby/src/bootstrap/index.js @@ -28,7 +28,7 @@ process.on(`unhandledRejection`, (reason, p) => { report.panic(reason) }) -const createGraphqlRunner = require(`./graphql-runner`) +import { createGraphQLRunner } from "./create-graphql-runner" const { extractQueries } = require(`../query/query-watcher`) const requiresWriter = require(`./requires-writer`) const { writeRedirects } = require(`./redirects-writer`) @@ -469,7 +469,7 @@ module.exports = async (args: BootstrapArgs) => { payload: _.flattenDeep([extensions, apiResults]), }) - const graphqlRunner = createGraphqlRunner(store, report) + const graphqlRunner = createGraphQLRunner(store, report) // Collect pages. 
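`create-graphql-runner.ts` above keeps a single `GraphQLRunner` instance and replaces it whenever a store-changing event fires. A small self-contained sketch of that invalidation pattern follows; a plain `EventEmitter` and factory stand in for Gatsby's redux `emitter` and the real runner.

```typescript
// Sketch only: the real module listens for events such as `CREATE_NODE` and `SET_SCHEMA`
// and rebuilds the GraphQLRunner so subsequent queries see the updated store.
import { EventEmitter } from "events"

function createInvalidatingRunner<T>(
  emitter: EventEmitter,
  eventTypes: string[],
  create: () => T
): () => T {
  let current = create()
  eventTypes.forEach(type =>
    emitter.on(type, () => {
      // Any of these events can change query results, so throw the old instance away.
      current = create()
    })
  )
  return (): T => current
}

const emitter = new EventEmitter()
const getRunner = createInvalidatingRunner(emitter, [`CREATE_NODE`, `DELETE_NODE`], () => ({
  createdAt: Date.now(),
}))

emitter.emit(`CREATE_NODE`)
// The next lookup returns a freshly created instance.
console.log(getRunner().createdAt)
```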
activity = report.activityTimer(`createPages`, { diff --git a/packages/gatsby/src/bootstrap/requires-writer.js b/packages/gatsby/src/bootstrap/requires-writer.js index f1c24b6061791..e34c0c72a948e 100644 --- a/packages/gatsby/src/bootstrap/requires-writer.js +++ b/packages/gatsby/src/bootstrap/requires-writer.js @@ -52,6 +52,7 @@ const getComponents = pages => _(pages) .map(pickComponentFields) .uniqBy(c => c.componentChunkName) + .orderBy(c => c.componentChunkName) .value() /** @@ -254,4 +255,5 @@ module.exports = { writeAll, resetLastHash, startListener, + getComponents, } diff --git a/packages/gatsby/src/bootstrap/schema-hot-reloader.ts b/packages/gatsby/src/bootstrap/schema-hot-reloader.ts index 808b0ab70b89c..2cdd1efcaeab1 100644 --- a/packages/gatsby/src/bootstrap/schema-hot-reloader.ts +++ b/packages/gatsby/src/bootstrap/schema-hot-reloader.ts @@ -7,7 +7,6 @@ import report from "gatsby-cli/lib/reporter" import { IGatsbyState } from "../redux/types" type TypeMap = IGatsbyState["inferenceMetadata"]["typeMap"] -type SchemaCustomization = IGatsbyState["schemaCustomization"] type InferenceMetadata = IGatsbyState["inferenceMetadata"] const inferredTypesChanged = ( @@ -19,41 +18,46 @@ const inferredTypesChanged = ( typeMap[type].dirty && !haveEqualFields(typeMap[type], prevTypeMap[type]) ) -const schemaChanged = ( - schemaCustomization: SchemaCustomization, - lastSchemaCustomization: SchemaCustomization -): boolean => - [`fieldExtensions`, `printConfig`, `thirdPartySchemas`, `types`].some( - key => schemaCustomization[key] !== lastSchemaCustomization[key] - ) - let lastMetadata: InferenceMetadata -let lastSchemaCustomization: SchemaCustomization // API_RUNNING_QUEUE_EMPTY could be emitted multiple types // in a short period of time, so debounce seems reasonable const maybeRebuildSchema = debounce(async (): Promise => { - const { inferenceMetadata, schemaCustomization } = store.getState() + const { inferenceMetadata } = store.getState() - if ( - !inferredTypesChanged(inferenceMetadata.typeMap, lastMetadata.typeMap) && - !schemaChanged(schemaCustomization, lastSchemaCustomization) - ) { + if (!inferredTypesChanged(inferenceMetadata.typeMap, lastMetadata.typeMap)) { return } const activity = report.activityTimer(`rebuild schema`) activity.start() - lastMetadata = cloneDeep(inferenceMetadata) - lastSchemaCustomization = schemaCustomization await rebuild({ parentSpan: activity }) await updateStateAndRunQueries(false, { parentSpan: activity }) activity.end() }, 1000) -export const bootstrapSchemaHotReloader = (): void => { - const { inferenceMetadata, schemaCustomization } = store.getState() +function snapshotInferenceMetadata(): void { + const { inferenceMetadata } = store.getState() lastMetadata = cloneDeep(inferenceMetadata) - lastSchemaCustomization = schemaCustomization +} + +export function bootstrapSchemaHotReloader(): void { + // Snapshot inference metadata at the time of the last schema rebuild + // (even if schema was rebuilt elsewhere) + // Using the snapshot later to check if inferred types actually changed since the last rebuild + snapshotInferenceMetadata() + emitter.on(`SET_SCHEMA`, snapshotInferenceMetadata) + + startSchemaHotReloader() +} + +export function startSchemaHotReloader(): void { + // Listen for node changes outside of a regular sourceNodes API call, + // e.g. 
markdown file update via watcher emitter.on(`API_RUNNING_QUEUE_EMPTY`, maybeRebuildSchema) } + +export function stopSchemaHotReloader(): void { + emitter.off(`API_RUNNING_QUEUE_EMPTY`, maybeRebuildSchema) + maybeRebuildSchema.cancel() +} diff --git a/packages/gatsby/src/commands/develop.ts b/packages/gatsby/src/commands/develop.ts index 3f06dea88627c..d647ea85b41bf 100644 --- a/packages/gatsby/src/commands/develop.ts +++ b/packages/gatsby/src/commands/develop.ts @@ -31,12 +31,17 @@ import * as WorkerPool from "../utils/worker/pool" import http from "http" import https from "https" -import { bootstrapSchemaHotReloader } from "../bootstrap/schema-hot-reloader" +import { + bootstrapSchemaHotReloader, + startSchemaHotReloader, + stopSchemaHotReloader, +} from "../bootstrap/schema-hot-reloader" import bootstrapPageHotReloader from "../bootstrap/page-hot-reloader" import { developStatic } from "./develop-static" import withResolverContext from "../schema/context" import sourceNodes from "../utils/source-nodes" import { createSchemaCustomization } from "../utils/create-schema-customization" +import { rebuild as rebuildSchema } from "../schema" import { websocketManager } from "../utils/websocket-manager" import getSslCert from "../utils/get-ssl-cert" import { slash } from "gatsby-core-utils" @@ -203,6 +208,7 @@ async function startServer(program: IProgram): Promise { **/ const REFRESH_ENDPOINT = `/__refresh` const refresh = async (req: express.Request): Promise => { + stopSchemaHotReloader() let activity = report.activityTimer(`createSchemaCustomization`, {}) activity.start() await createSchemaCustomization({ @@ -215,6 +221,11 @@ async function startServer(program: IProgram): Promise { webhookBody: req.body, }) activity.end() + activity = report.activityTimer(`rebuild schema`) + activity.start() + await rebuildSchema({ parentSpan: activity }) + activity.end() + startSchemaHotReloader() } app.use(REFRESH_ENDPOINT, express.json()) app.post(REFRESH_ENDPOINT, (req, res) => { diff --git a/packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js b/packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js index e02a612f28ce0..6f242c1cbbcb0 100644 --- a/packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js +++ b/packages/gatsby/src/db/__tests__/fixtures/ensure-loki.js @@ -4,5 +4,8 @@ module.exports = () => { if (backend === `loki`) { const lokiDb = require(`../../loki`) beforeAll(lokiDb.start) + return true } + + return false } diff --git a/packages/gatsby/src/query/graphql-runner.ts b/packages/gatsby/src/query/graphql-runner.ts index ab45a4855380a..75fb944a3e1af 100644 --- a/packages/gatsby/src/query/graphql-runner.ts +++ b/packages/gatsby/src/query/graphql-runner.ts @@ -1,4 +1,5 @@ import crypto from "crypto" +import v8 from "v8" import { parse, validate, @@ -169,7 +170,7 @@ export default class GraphQLRunner { crypto .createHash(`sha1`) .update(statsQuery) - .update(JSON.stringify(context)) + .update(v8.serialize(context)) .digest(`hex`) ) diff --git a/packages/gatsby/src/query/queue.js b/packages/gatsby/src/query/queue.js index c690f5645bfc2..e0bdd64d93aae 100644 --- a/packages/gatsby/src/query/queue.js +++ b/packages/gatsby/src/query/queue.js @@ -10,6 +10,7 @@ const createBaseOptions = () => { concurrent: Number(process.env.GATSBY_EXPERIMENTAL_QUERY_CONCURRENCY) || 4, // eslint-disable-next-line new-cap store: FastMemoryStore(), + maxTimeout: 15000, } } diff --git a/packages/gatsby/src/redux/__tests__/index.js b/packages/gatsby/src/redux/__tests__/index.js index e90eb1742765f..addc4d52aaf4f 100644 
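The `graphql-runner.ts` hunk above swaps `JSON.stringify(context)` for `v8.serialize(context)` when hashing the query context. A short sketch of why that matters for the cache key: `JSON.stringify` silently drops `undefined`-valued keys, so two different contexts could hash to the same value, while the v8 serializer preserves them, and `hash.update` accepts the resulting `Buffer` directly.

```typescript
import crypto from "crypto"
import v8 from "v8"

function contextHash(query: string, context: Record<string, unknown>): string {
  return crypto
    .createHash(`sha1`)
    .update(query)
    .update(v8.serialize(context)) // Buffer is accepted by hash.update
    .digest(`hex`)
}

// With JSON.stringify these two contexts would hash identically,
// because the `draft: undefined` entry is dropped during stringification.
console.log(contextHash(`{ foo }`, { id: `a` }))
console.log(contextHash(`{ foo }`, { id: `a`, draft: undefined }))
```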
--- a/packages/gatsby/src/redux/__tests__/index.js +++ b/packages/gatsby/src/redux/__tests__/index.js @@ -16,7 +16,7 @@ jest.mock(`fs-extra`, () => { mockWrittenContent.set(file, content) ), readFileSync: jest.fn(file => mockWrittenContent.get(file)), - renameSync: jest.fn((from, to) => { + moveSync: jest.fn((from, to) => { // This will only work for folders if they are always the full prefix // of the file... (that goes for both input dirs). That's the case here. if (mockWrittenContent.has(to)) { diff --git a/packages/gatsby/src/redux/__tests__/run-sift.js b/packages/gatsby/src/redux/__tests__/run-sift.js index e7da92dedf7d0..bd4ff8a82658f 100644 --- a/packages/gatsby/src/redux/__tests__/run-sift.js +++ b/packages/gatsby/src/redux/__tests__/run-sift.js @@ -132,9 +132,9 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { ) }) ;[ - { desc: `with cache`, cb: () => new Map() }, // Avoids sift for flat filters + { desc: `with cache`, cb: () /*:FiltersCache*/ => new Map() }, // Avoids sift for flat filters { desc: `no cache`, cb: () => null }, // Always goes through sift - ].forEach(({ desc, cb: createIndexCache }) => { + ].forEach(({ desc, cb: createFiltersCache }) => { describe(desc, () => { describe(`filters by just id correctly`, () => { it(`eq operator`, async () => { @@ -149,7 +149,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: true, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) const resultMany = await runSift({ @@ -157,7 +157,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: false, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(resultSingular.map(o => o.id)).toEqual([mockNodes()[1].id]) @@ -176,7 +176,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: true, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) const resultMany = await runSift({ @@ -184,7 +184,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: false, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) // `id-1` node is not of queried type, so results should be empty @@ -204,7 +204,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: true, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) const resultMany = await runSift({ @@ -212,7 +212,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: false, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(resultSingular.map(o => o.id)).toEqual([mockNodes()[2].id]) @@ -228,7 +228,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: true, nodeTypeNames: [`NonExistentNodeType`], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(resultSingular).toEqual([]) }) @@ -246,7 +246,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: true, nodeTypeNames: [gqlType.name], - 
typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(Array.isArray(resultSingular)).toBe(true) @@ -268,7 +268,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: false, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(Array.isArray(resultMany)).toBe(true) @@ -290,7 +290,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: true, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(Array.isArray(resultSingular)).toBe(true) @@ -312,7 +312,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: false, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(Array.isArray(resultMany)).toBe(true) @@ -334,7 +334,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: true, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(Array.isArray(resultSingular)).toBe(true) @@ -352,7 +352,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: false, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(resultMany).toBe(null) @@ -372,7 +372,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { queryArgs, firstOnly: false, nodeTypeNames: [gqlType.name], - typedKeyValueIndexes: createIndexCache(), + filtersCache: createFiltersCache(), }) expect(Array.isArray(resultMany)).toBe(true) @@ -446,7 +446,7 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { expect(results[0].deep.flat.search.chain).toEqual(300) }) - it(`ignores elemMatch`, () => { + it(`supports elemMatch`, () => { const filter = { elemList: { $elemMatch: { foo: { $eq: `baz` } }, @@ -459,7 +459,8 @@ if (!process.env.GATSBY_DB_NODES || process.env.GATSBY_DB_NODES === `redux`) { new Map() ) - expect(result).toBe(undefined) + expect(result).not.toBe(undefined) + expect(result.length).toBe(2) }) }) } else { diff --git a/packages/gatsby/src/redux/actions/internal.ts b/packages/gatsby/src/redux/actions/internal.ts index 19c811ce50087..04a33541a1e1f 100644 --- a/packages/gatsby/src/redux/actions/internal.ts +++ b/packages/gatsby/src/redux/actions/internal.ts @@ -1,4 +1,5 @@ import { + IGatsbyPlugin, ProgramStatus, ICreatePageDependencyAction, IDeleteComponentDependenciesAction, @@ -81,7 +82,7 @@ export const replaceComponentQuery = ({ */ export const replaceStaticQuery = ( args: any, - plugin: Plugin | null | undefined = null + plugin: IGatsbyPlugin | null | undefined = null ): IReplaceStaticQueryAction => { return { type: `REPLACE_STATIC_QUERY`, @@ -98,7 +99,7 @@ export const replaceStaticQuery = ( */ export const queryExtracted = ( { componentPath, query }: { componentPath: string; query: string }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractedAction => { return { @@ -116,7 +117,7 @@ export const queryExtracted = ( */ export const queryExtractionGraphQLError = ( { componentPath, error }: { componentPath: string; error: string }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractionGraphQLErrorAction => { 
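The `run-sift` test change above flips `ignores elemMatch` to `supports elemMatch`, matching the `elemMatch` fast-filter entry in the gatsby 2.20.14 changelog. A hedged usage sketch of the filter from a page query; the `allTestNode`/`elemList`/`foo` shape mirrors the test fixture and is otherwise hypothetical.

```typescript
// Hypothetical node type; only the elemMatch filter syntax is the point here.
import { graphql } from "gatsby"

export const query = graphql`
  {
    allTestNode(filter: { elemList: { elemMatch: { foo: { eq: "baz" } } } }) {
      nodes {
        id
      }
    }
  }
`
```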
return { @@ -135,7 +136,7 @@ export const queryExtractionGraphQLError = ( */ export const queryExtractedBabelSuccess = ( { componentPath }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractedBabelSuccessAction => { return { @@ -153,7 +154,7 @@ export const queryExtractedBabelSuccess = ( */ export const queryExtractionBabelError = ( { componentPath, error }: { componentPath: string; error: Error }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IQueryExtractionBabelErrorAction => { return { @@ -170,7 +171,7 @@ export const queryExtractionBabelError = ( */ export const setProgramStatus = ( status: ProgramStatus, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): ISetProgramStatusAction => { return { @@ -187,7 +188,7 @@ export const setProgramStatus = ( */ export const pageQueryRun = ( { path, componentPath, isPage }, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IPageQueryRunAction => { return { @@ -204,7 +205,7 @@ export const pageQueryRun = ( */ export const removeStaleJob = ( contentDigest: string, - plugin: Plugin, + plugin: IGatsbyPlugin, traceId?: string ): IRemoveStaleJobAction => { return { diff --git a/packages/gatsby/src/redux/actions/restricted.js b/packages/gatsby/src/redux/actions/restricted.js deleted file mode 100644 index 85be2a3a00b9a..0000000000000 --- a/packages/gatsby/src/redux/actions/restricted.js +++ /dev/null @@ -1,459 +0,0 @@ -// @flow -const { camelCase } = require(`lodash`) -const report = require(`gatsby-cli/lib/reporter`) -const { parseTypeDef } = require(`../../schema/types/type-defs`) - -import type { Plugin } from "./types" - -const actions = {} - -/** - * Add a third-party schema to be merged into main schema. Schema has to be a - * graphql-js GraphQLSchema object. - * - * This schema is going to be merged as-is. This can easily break the main - * Gatsby schema, so it's user's responsibility to make sure it doesn't happen - * (by e.g. namespacing the schema). - * - * @availableIn [createSchemaCustomization, sourceNodes] - * - * @param {Object} $0 - * @param {GraphQLSchema} $0.schema GraphQL schema to add - */ -actions.addThirdPartySchema = ( - { schema }: { schema: GraphQLSchema }, - plugin?: Plugin, - traceId?: string -) => { - return { - type: `ADD_THIRD_PARTY_SCHEMA`, - plugin, - traceId, - payload: schema, - } -} - -import type GatsbyGraphQLType from "../../schema/types/type-builders" -/** - * Add type definitions to the GraphQL schema. - * - * @availableIn [createSchemaCustomization, sourceNodes] - * - * @param {string | GraphQLOutputType | GatsbyGraphQLType | string[] | GraphQLOutputType[] | GatsbyGraphQLType[]} types Type definitions - * - * Type definitions can be provided either as - * [`graphql-js` types](https://graphql.org/graphql-js/), in - * [GraphQL schema definition language (SDL)](https://graphql.org/learn/) - * or using Gatsby Type Builders available on the `schema` API argument. - * - * Things to note: - * * type definitions targeting node types, i.e. `MarkdownRemark` and others - * added in `sourceNodes` or `onCreateNode` APIs, need to implement the - * `Node` interface. Interface fields will be added automatically, but it - * is mandatory to label those types with `implements Node`. - * * by default, explicit type definitions from `createTypes` will be merged - * with inferred field types, and default field resolvers for `Date` (which - * adds formatting options) and `File` (which resolves the field value as - * a `relativePath` foreign-key field) are added. 
This behavior can be - * customised with `@infer`, `@dontInfer` directives or extensions. Fields - * may be assigned resolver (and other option like args) with additional - * directives. Currently `@dateformat`, `@link`, `@fileByRelativePath` and - * `@proxy` are available. - * - * - * Schema customization controls: - * * `@infer` - run inference on the type and add fields that don't exist on the - * defined type to it. - * * `@dontInfer` - don't run any inference on the type - * - * Extensions to add resolver options: - * * `@dateformat` - add date formatting arguments. Accepts `formatString` and - * `locale` options that sets the defaults for this field - * * `@link` - connect to a different Node. Arguments `by` and `from`, which - * define which field to compare to on a remote node and which field to use on - * the source node - * * `@fileByRelativePath` - connect to a File node. Same arguments. The - * difference from link is that this normalizes the relative path to be - * relative from the path where source node is found. - * * `@proxy` - in case the underlying node data contains field names with - * characters that are invalid in GraphQL, `proxy` allows to explicitly - * proxy those properties to fields with valid field names. Takes a `from` arg. - * - * - * @example - * exports.createSchemaCustomization = ({ actions }) => { - * const { createTypes } = actions - * const typeDefs = ` - * """ - * Markdown Node - * """ - * type MarkdownRemark implements Node @infer { - * frontmatter: Frontmatter! - * } - * - * """ - * Markdown Frontmatter - * """ - * type Frontmatter @infer { - * title: String! - * author: AuthorJson! @link - * date: Date! @dateformat - * published: Boolean! - * tags: [String!]! - * } - * - * """ - * Author information - * """ - * # Does not include automatically inferred fields - * type AuthorJson implements Node @dontInfer { - * name: String! - * birthday: Date! @dateformat(locale: "ru") - * } - * ` - * createTypes(typeDefs) - * } - * - * // using Gatsby Type Builder API - * exports.createSchemaCustomization = ({ actions, schema }) => { - * const { createTypes } = actions - * const typeDefs = [ - * schema.buildObjectType({ - * name: 'MarkdownRemark', - * fields: { - * frontmatter: 'Frontmatter!' - * }, - * interfaces: ['Node'], - * extensions: { - * infer: true, - * }, - * }), - * schema.buildObjectType({ - * name: 'Frontmatter', - * fields: { - * title: { - * type: 'String!', - * resolve(parent) { - * return parent.title || '(Untitled)' - * } - * }, - * author: { - * type: 'AuthorJson' - * extensions: { - * link: {}, - * }, - * } - * date: { - * type: 'Date!' - * extensions: { - * dateformat: {}, - * }, - * }, - * published: 'Boolean!', - * tags: '[String!]!', - * } - * }), - * schema.buildObjectType({ - * name: 'AuthorJson', - * fields: { - * name: 'String!' - * birthday: { - * type: 'Date!' - * extensions: { - * dateformat: { - * locale: 'ru', - * }, - * }, - * }, - * }, - * interfaces: ['Node'], - * extensions: { - * infer: false, - * }, - * }), - * ] - * createTypes(typeDefs) - * } - */ -actions.createTypes = ( - types: - | string - | GraphQLOutputType - | GatsbyGraphQLType - | Array, - plugin?: Plugin, - traceId?: string -) => { - return { - type: `CREATE_TYPES`, - plugin, - traceId, - payload: Array.isArray(types) - ? 
types.map(parseTypeDef) - : parseTypeDef(types), - } -} - -const { reservedExtensionNames } = require(`../../schema/extensions`) -import type GraphQLFieldExtensionDefinition from "../../schema/extensions" -/** - * Add a field extension to the GraphQL schema. - * - * Extensions allow defining custom behavior which can be added to fields - * via directive (in SDL) or on the `extensions` prop (with Type Builders). - * - * The extension definition takes a `name`, an `extend` function, and optional - * extension `args` for options. The `extend` function has to return a (partial) - * field config, and receives the extension options and the previous field config - * as arguments. - * - * @availableIn [createSchemaCustomization, sourceNodes] - * - * @param {GraphQLFieldExtensionDefinition} extension The field extension definition - * @example - * exports.createSchemaCustomization = ({ actions }) => { - * const { createFieldExtension } = actions - * createFieldExtension({ - * name: 'motivate', - * args: { - * caffeine: 'Int' - * }, - * extend(options, prevFieldConfig) { - * return { - * type: 'String', - * args: { - * sunshine: { - * type: 'Int', - * defaultValue: 0, - * }, - * }, - * resolve(source, args, context, info) { - * const motivation = (options.caffeine || 0) - args.sunshine - * if (motivation > 5) return 'Work! Work! Work!' - * return 'Maybe tomorrow.' - * }, - * } - * }, - * }) - * } - */ -actions.createFieldExtension = ( - extension: GraphQLFieldExtensionDefinition, - plugin?: Plugin, - traceId?: string -) => (dispatch, getState) => { - const { name } = extension || {} - const { fieldExtensions } = getState().schemaCustomization - - if (!name) { - report.error(`The provided field extension must have a \`name\` property.`) - } else if (reservedExtensionNames.includes(name)) { - report.error( - `The field extension name \`${name}\` is reserved for internal use.` - ) - } else if (fieldExtensions[name]) { - report.error( - `A field extension with the name \`${name}\` has already been registered.` - ) - } else { - dispatch({ - type: `CREATE_FIELD_EXTENSION`, - plugin, - traceId, - payload: { name, extension }, - }) - } -} - -/** - * Write GraphQL schema to file - * - * Writes out inferred and explicitly specified type definitions. This is not - * the full GraphQL schema, but only the types necessary to recreate all type - * definitions, i.e. it does not include directives, built-ins, and derived - * types for filtering, sorting, pagination etc. Optionally, you can define a - * list of types to include/exclude. This is recommended to avoid including - * definitions for plugin-created types. 
- * - * @availableIn [createSchemaCustomization] - * - * @param {object} $0 - * @param {string} [$0.path] The path to the output file, defaults to `schema.gql` - * @param {object} [$0.include] Configure types to include - * @param {string[]} [$0.include.types] Only include these types - * @param {string[]} [$0.include.plugins] Only include types owned by these plugins - * @param {object} [$0.exclude] Configure types to exclude - * @param {string[]} [$0.exclude.types] Do not include these types - * @param {string[]} [$0.exclude.plugins] Do not include types owned by these plugins - * @param {boolean} [withFieldTypes] Include field types, defaults to `true` - */ -actions.printTypeDefinitions = ( - { - path = `schema.gql`, - include, - exclude, - withFieldTypes = true, - }: { - path?: string, - include?: { types?: Array, plugins?: Array }, - exclude?: { types?: Array, plugins?: Array }, - withFieldTypes?: boolean, - }, - plugin?: Plugin, - traceId?: string -) => { - return { - type: `PRINT_SCHEMA_REQUESTED`, - plugin, - traceId, - payload: { - path, - include, - exclude, - withFieldTypes, - }, - } -} - -/** - * Make functionality available on field resolver `context` - * - * @availableIn [createSchemaCustomization] - * - * @param {object} context Object to make available on `context`. - * When called from a plugin, the context value will be namespaced under - * the camel-cased plugin name without the "gatsby-" prefix - * @example - * const getHtml = md => remark().use(html).process(md) - * exports.createSchemaCustomization = ({ actions }) => { - * actions.createResolverContext({ getHtml }) - * } - * // The context value can then be accessed in any field resolver like this: - * exports.createSchemaCustomization = ({ actions }) => { - * actions.createTypes(schema.buildObjectType({ - * name: 'Test', - * interfaces: ['Node'], - * fields: { - * md: { - * type: 'String!', - * async resolve(source, args, context, info) { - * const processed = await context.transformerRemark.getHtml(source.internal.contents) - * return processed.contents - * } - * } - * } - * })) - * } - */ -actions.createResolverContext = ( - context: object, - plugin?: Plugin, - traceId?: string -) => dispatch => { - if (!context || typeof context !== `object`) { - report.error( - `Expected context value passed to \`createResolverContext\` to be an object. Received "${context}".` - ) - } else { - const { name } = plugin || {} - const payload = - !name || name === `default-site-plugin` - ? context - : { [camelCase(name.replace(/^gatsby-/, ``))]: context } - dispatch({ - type: `CREATE_RESOLVER_CONTEXT`, - plugin, - traceId, - payload, - }) - } -} - -const withDeprecationWarning = (actionName, action, api, allowedIn) => ( - ...args -) => { - report.warn( - `Calling \`${actionName}\` in the \`${api}\` API is deprecated. ` + - `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` - ) - return action(...args) -} - -const withErrorMessage = (actionName, api, allowedIn) => () => - // return a thunk that does not dispatch anything - () => { - report.error( - `\`${actionName}\` is not available in the \`${api}\` API. 
` + - `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` - ) - } - -const nodeAPIs = Object.keys(require(`../../utils/api-node-docs`)) - -const ALLOWED_IN = `ALLOWED_IN` -const DEPRECATED_IN = `DEPRECATED_IN` - -const set = (availableActionsByAPI, api, actionName, action) => { - availableActionsByAPI[api] = availableActionsByAPI[api] || {} - availableActionsByAPI[api][actionName] = action -} - -const mapAvailableActionsToAPIs = restrictions => { - const availableActionsByAPI = {} - - const actionNames = Object.keys(restrictions) - actionNames.forEach(actionName => { - const action = actions[actionName] - - const allowedIn = restrictions[actionName][ALLOWED_IN] || [] - allowedIn.forEach(api => - set(availableActionsByAPI, api, actionName, action) - ) - - const deprecatedIn = restrictions[actionName][DEPRECATED_IN] || [] - deprecatedIn.forEach(api => - set( - availableActionsByAPI, - api, - actionName, - withDeprecationWarning(actionName, action, api, allowedIn) - ) - ) - - const forbiddenIn = nodeAPIs.filter( - api => ![...allowedIn, ...deprecatedIn].includes(api) - ) - forbiddenIn.forEach(api => - set( - availableActionsByAPI, - api, - actionName, - withErrorMessage(actionName, api, allowedIn) - ) - ) - }) - - return availableActionsByAPI -} - -const availableActionsByAPI = mapAvailableActionsToAPIs({ - createFieldExtension: { - [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], - }, - createTypes: { - [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], - [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], - }, - createResolverContext: { - [ALLOWED_IN]: [`createSchemaCustomization`], - }, - addThirdPartySchema: { - [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], - [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], - }, - printTypeDefinitions: { - [ALLOWED_IN]: [`createSchemaCustomization`], - }, -}) - -module.exports = { actions, availableActionsByAPI } diff --git a/packages/gatsby/src/redux/actions/restricted.ts b/packages/gatsby/src/redux/actions/restricted.ts new file mode 100644 index 0000000000000..8cb4a68b89f0b --- /dev/null +++ b/packages/gatsby/src/redux/actions/restricted.ts @@ -0,0 +1,517 @@ +import { camelCase } from "lodash" +import { GraphQLSchema, GraphQLOutputType } from "graphql" +import { ActionCreator } from "redux" +import { ThunkAction } from "redux-thunk" +import report from "gatsby-cli/lib/reporter" +import { parseTypeDef } from "../../schema/types/type-defs" +import { + GraphQLFieldExtensionDefinition, + reservedExtensionNames, +} from "../../schema/extensions" +import { GatsbyGraphQLType } from "../../schema/types/type-builders" +import { + IGatsbyPlugin, + ActionsUnion, + IAddThirdPartySchema, + ICreateTypes, + IGatsbyState, + ICreateFieldExtension, + IPrintTypeDefinitions, + ICreateResolverContext, + IGatsbyPluginContext, +} from "../types" + +type RestrictionActionNames = + | "createFieldExtension" + | "createTypes" + | "createResolverContext" + | "addThirdPartySchema" + | "printTypeDefinitions" + +type SomeActionCreator = + | ActionCreator + | ActionCreator> + +export const actions = { + /** + * Add a third-party schema to be merged into main schema. Schema has to be a + * graphql-js GraphQLSchema object. + * + * This schema is going to be merged as-is. This can easily break the main + * Gatsby schema, so it's user's responsibility to make sure it doesn't happen + * (by e.g. namespacing the schema). 
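A minimal sketch of what a call to this action might look like from a plugin's `createSchemaCustomization`, assuming the schema is built with `graphql-js`'s `buildSchema`. The type and field names are invented, and the `ThirdPartyBlog` prefix stands in for the namespacing recommended above; a real-world schema would typically also carry resolvers (for example, one stitched from a remote API).

```js
const { buildSchema } = require(`graphql`)

exports.createSchemaCustomization = ({ actions }) => {
  actions.addThirdPartySchema({
    schema: buildSchema(`
      type ThirdPartyBlogPost {
        id: ID!
        title: String
      }

      type Query {
        thirdPartyBlogPosts: [ThirdPartyBlogPost]
      }
    `),
  })
}
```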
+ * + * @availableIn [createSchemaCustomization, sourceNodes] + * + * @param {Object} $0 + * @param {GraphQLSchema} $0.schema GraphQL schema to add + */ + addThirdPartySchema: ( + { schema }: { schema: GraphQLSchema }, + plugin: IGatsbyPlugin, + traceId?: string + ): IAddThirdPartySchema => { + return { + type: `ADD_THIRD_PARTY_SCHEMA`, + plugin, + traceId, + payload: schema, + } + }, + + /** + * Add type definitions to the GraphQL schema. + * + * @availableIn [createSchemaCustomization, sourceNodes] + * + * @param {string | GraphQLOutputType | GatsbyGraphQLType | string[] | GraphQLOutputType[] | GatsbyGraphQLType[]} types Type definitions + * + * Type definitions can be provided either as + * [`graphql-js` types](https://graphql.org/graphql-js/), in + * [GraphQL schema definition language (SDL)](https://graphql.org/learn/) + * or using Gatsby Type Builders available on the `schema` API argument. + * + * Things to note: + * * type definitions targeting node types, i.e. `MarkdownRemark` and others + * added in `sourceNodes` or `onCreateNode` APIs, need to implement the + * `Node` interface. Interface fields will be added automatically, but it + * is mandatory to label those types with `implements Node`. + * * by default, explicit type definitions from `createTypes` will be merged + * with inferred field types, and default field resolvers for `Date` (which + * adds formatting options) and `File` (which resolves the field value as + * a `relativePath` foreign-key field) are added. This behavior can be + * customised with `@infer`, `@dontInfer` directives or extensions. Fields + * may be assigned resolver (and other option like args) with additional + * directives. Currently `@dateformat`, `@link`, `@fileByRelativePath` and + * `@proxy` are available. + * + * + * Schema customization controls: + * * `@infer` - run inference on the type and add fields that don't exist on the + * defined type to it. + * * `@dontInfer` - don't run any inference on the type + * + * Extensions to add resolver options: + * * `@dateformat` - add date formatting arguments. Accepts `formatString` and + * `locale` options that sets the defaults for this field + * * `@link` - connect to a different Node. Arguments `by` and `from`, which + * define which field to compare to on a remote node and which field to use on + * the source node + * * `@fileByRelativePath` - connect to a File node. Same arguments. The + * difference from link is that this normalizes the relative path to be + * relative from the path where source node is found. + * * `@proxy` - in case the underlying node data contains field names with + * characters that are invalid in GraphQL, `proxy` allows to explicitly + * proxy those properties to fields with valid field names. Takes a `from` arg. + * + * + * @example + * exports.createSchemaCustomization = ({ actions }) => { + * const { createTypes } = actions + * const typeDefs = ` + * """ + * Markdown Node + * """ + * type MarkdownRemark implements Node @infer { + * frontmatter: Frontmatter! + * } + * + * """ + * Markdown Frontmatter + * """ + * type Frontmatter @infer { + * title: String! + * author: AuthorJson! @link + * date: Date! @dateformat + * published: Boolean! + * tags: [String!]! + * } + * + * """ + * Author information + * """ + * # Does not include automatically inferred fields + * type AuthorJson implements Node @dontInfer { + * name: String! + * birthday: Date! 
@dateformat(locale: "ru") + * } + * ` + * createTypes(typeDefs) + * } + * + * // using Gatsby Type Builder API + * exports.createSchemaCustomization = ({ actions, schema }) => { + * const { createTypes } = actions + * const typeDefs = [ + * schema.buildObjectType({ + * name: 'MarkdownRemark', + * fields: { + * frontmatter: 'Frontmatter!' + * }, + * interfaces: ['Node'], + * extensions: { + * infer: true, + * }, + * }), + * schema.buildObjectType({ + * name: 'Frontmatter', + * fields: { + * title: { + * type: 'String!', + * resolve(parent) { + * return parent.title || '(Untitled)' + * } + * }, + * author: { + * type: 'AuthorJson' + * extensions: { + * link: {}, + * }, + * } + * date: { + * type: 'Date!' + * extensions: { + * dateformat: {}, + * }, + * }, + * published: 'Boolean!', + * tags: '[String!]!', + * } + * }), + * schema.buildObjectType({ + * name: 'AuthorJson', + * fields: { + * name: 'String!' + * birthday: { + * type: 'Date!' + * extensions: { + * dateformat: { + * locale: 'ru', + * }, + * }, + * }, + * }, + * interfaces: ['Node'], + * extensions: { + * infer: false, + * }, + * }), + * ] + * createTypes(typeDefs) + * } + */ + createTypes: ( + types: + | string + | GraphQLOutputType + | GatsbyGraphQLType + | Array, + plugin: IGatsbyPlugin, + traceId?: string + ): ICreateTypes => { + return { + type: `CREATE_TYPES`, + plugin, + traceId, + payload: Array.isArray(types) + ? types.map(parseTypeDef) + : parseTypeDef(types), + } + }, + + /** + * Add a field extension to the GraphQL schema. + * + * Extensions allow defining custom behavior which can be added to fields + * via directive (in SDL) or on the `extensions` prop (with Type Builders). + * + * The extension definition takes a `name`, an `extend` function, and optional + * extension `args` for options. The `extend` function has to return a (partial) + * field config, and receives the extension options and the previous field config + * as arguments. + * + * @availableIn [createSchemaCustomization, sourceNodes] + * + * @param {GraphQLFieldExtensionDefinition} extension The field extension definition + * @example + * exports.createSchemaCustomization = ({ actions }) => { + * const { createFieldExtension } = actions + * createFieldExtension({ + * name: 'motivate', + * args: { + * caffeine: 'Int' + * }, + * extend(options, prevFieldConfig) { + * return { + * type: 'String', + * args: { + * sunshine: { + * type: 'Int', + * defaultValue: 0, + * }, + * }, + * resolve(source, args, context, info) { + * const motivation = (options.caffeine || 0) - args.sunshine + * if (motivation > 5) return 'Work! Work! Work!' + * return 'Maybe tomorrow.' 
+ * }, + * } + * }, + * }) + * } + */ + createFieldExtension: ( + extension: GraphQLFieldExtensionDefinition, + plugin: IGatsbyPlugin, + traceId?: string + ): ThunkAction => ( + dispatch, + getState + ): void => { + const { name } = extension || {} + const { fieldExtensions } = getState().schemaCustomization + + if (!name) { + report.error( + `The provided field extension must have a \`name\` property.` + ) + } else if (reservedExtensionNames.includes(name)) { + report.error( + `The field extension name \`${name}\` is reserved for internal use.` + ) + } else if (fieldExtensions[name]) { + report.error( + `A field extension with the name \`${name}\` has already been registered.` + ) + } else { + dispatch({ + type: `CREATE_FIELD_EXTENSION`, + plugin, + traceId, + payload: { name, extension }, + }) + } + }, + + /** + * Write GraphQL schema to file + * + * Writes out inferred and explicitly specified type definitions. This is not + * the full GraphQL schema, but only the types necessary to recreate all type + * definitions, i.e. it does not include directives, built-ins, and derived + * types for filtering, sorting, pagination etc. Optionally, you can define a + * list of types to include/exclude. This is recommended to avoid including + * definitions for plugin-created types. + * + * @availableIn [createSchemaCustomization] + * + * @param {object} $0 + * @param {string} [$0.path] The path to the output file, defaults to `schema.gql` + * @param {object} [$0.include] Configure types to include + * @param {string[]} [$0.include.types] Only include these types + * @param {string[]} [$0.include.plugins] Only include types owned by these plugins + * @param {object} [$0.exclude] Configure types to exclude + * @param {string[]} [$0.exclude.types] Do not include these types + * @param {string[]} [$0.exclude.plugins] Do not include types owned by these plugins + * @param {boolean} [withFieldTypes] Include field types, defaults to `true` + */ + printTypeDefinitions: ( + { + path = `schema.gql`, + include, + exclude, + withFieldTypes = true, + }: { + path?: string + include?: { types?: Array; plugins?: Array } + exclude?: { types?: Array; plugins?: Array } + withFieldTypes?: boolean + }, + plugin: IGatsbyPlugin, + traceId?: string + ): IPrintTypeDefinitions => { + return { + type: `PRINT_SCHEMA_REQUESTED`, + plugin, + traceId, + payload: { + path, + include, + exclude, + withFieldTypes, + }, + } + }, + + /** + * Make functionality available on field resolver `context` + * + * @availableIn [createSchemaCustomization] + * + * @param {object} context Object to make available on `context`. 
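Returning to `printTypeDefinitions` above: a short sketch of calling it from `gatsby-node.js` with the documented options. The excluded plugin and type names are only examples of the "avoid plugin-created types" advice and would be adjusted to a site's own plugins.

```js
exports.createSchemaCustomization = ({ actions }) => {
  actions.printTypeDefinitions({
    path: `./type-definitions.gql`,
    exclude: {
      plugins: [`gatsby-source-filesystem`],
      types: [`SiteBuildMetadata`],
    },
    // withFieldTypes defaults to true, so field types are written out as well
  })
}
```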
+ * When called from a plugin, the context value will be namespaced under + * the camel-cased plugin name without the "gatsby-" prefix + * @example + * const getHtml = md => remark().use(html).process(md) + * exports.createSchemaCustomization = ({ actions }) => { + * actions.createResolverContext({ getHtml }) + * } + * // The context value can then be accessed in any field resolver like this: + * exports.createSchemaCustomization = ({ actions }) => { + * actions.createTypes(schema.buildObjectType({ + * name: 'Test', + * interfaces: ['Node'], + * fields: { + * md: { + * type: 'String!', + * async resolve(source, args, context, info) { + * const processed = await context.transformerRemark.getHtml(source.internal.contents) + * return processed.contents + * } + * } + * } + * })) + * } + */ + createResolverContext: ( + context: IGatsbyPluginContext, + plugin: IGatsbyPlugin, + traceId?: string + ): ThunkAction => ( + dispatch + ): void => { + if (!context || typeof context !== `object`) { + report.error( + `Expected context value passed to \`createResolverContext\` to be an object. Received "${context}".` + ) + } else { + const { name } = plugin || {} + const payload = + !name || name === `default-site-plugin` + ? context + : { [camelCase(name.replace(/^gatsby-/, ``))]: context } + dispatch({ + type: `CREATE_RESOLVER_CONTEXT`, + plugin, + traceId, + payload, + }) + } + }, +} + +const withDeprecationWarning = ( + actionName: RestrictionActionNames, + action: SomeActionCreator, + api: API, + allowedIn: API[] +): SomeActionCreator => (...args: any[]): ReturnType> => { + report.warn( + `Calling \`${actionName}\` in the \`${api}\` API is deprecated. ` + + `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` + ) + return action(...args) +} + +const withErrorMessage = ( + actionName: RestrictionActionNames, + api: API, + allowedIn: API[] +) => () => + // return a thunk that does not dispatch anything + (): void => { + report.error( + `\`${actionName}\` is not available in the \`${api}\` API. 
` + + `Please use: ${allowedIn.map(a => `\`${a}\``).join(`, `)}.` + ) + } + +const nodeAPIs = Object.keys(require(`../../utils/api-node-docs`)) + +const ALLOWED_IN = `ALLOWED_IN` +const DEPRECATED_IN = `DEPRECATED_IN` + +type API = string + +type Restrictions = Record< + RestrictionActionNames, + Partial<{ + ALLOWED_IN: API[] + DEPRECATED_IN: API[] + }> +> + +type AvailableActionsByAPI = Record< + API, + { [K in RestrictionActionNames]: SomeActionCreator } +> + +const set = ( + availableActionsByAPI: {}, + api: API, + actionName: RestrictionActionNames, + action: SomeActionCreator +): void => { + availableActionsByAPI[api] = availableActionsByAPI[api] || {} + availableActionsByAPI[api][actionName] = action +} + +const mapAvailableActionsToAPIs = ( + restrictions: Restrictions +): AvailableActionsByAPI => { + const availableActionsByAPI: AvailableActionsByAPI = {} + + const actionNames = Object.keys(restrictions) as (keyof typeof restrictions)[] + actionNames.forEach(actionName => { + const action = actions[actionName] + + const allowedIn: API[] = restrictions[actionName][ALLOWED_IN] || [] + allowedIn.forEach(api => + set(availableActionsByAPI, api, actionName, action) + ) + + const deprecatedIn: API[] = restrictions[actionName][DEPRECATED_IN] || [] + deprecatedIn.forEach(api => + set( + availableActionsByAPI, + api, + actionName, + withDeprecationWarning(actionName, action, api, allowedIn) + ) + ) + + const forbiddenIn = nodeAPIs.filter( + api => ![...allowedIn, ...deprecatedIn].includes(api) + ) + forbiddenIn.forEach(api => + set( + availableActionsByAPI, + api, + actionName, + withErrorMessage(actionName, api, allowedIn) + ) + ) + }) + + return availableActionsByAPI +} + +export const availableActionsByAPI = mapAvailableActionsToAPIs({ + createFieldExtension: { + [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], + }, + createTypes: { + [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], + [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], + }, + createResolverContext: { + [ALLOWED_IN]: [`createSchemaCustomization`], + }, + addThirdPartySchema: { + [ALLOWED_IN]: [`sourceNodes`, `createSchemaCustomization`], + [DEPRECATED_IN]: [`onPreInit`, `onPreBootstrap`], + }, + printTypeDefinitions: { + [ALLOWED_IN]: [`createSchemaCustomization`], + }, +}) diff --git a/packages/gatsby/src/redux/actions/types.js b/packages/gatsby/src/redux/actions/types.js deleted file mode 100644 index 5980f1797479d..0000000000000 --- a/packages/gatsby/src/redux/actions/types.js +++ /dev/null @@ -1,6 +0,0 @@ -// @flow -type Plugin = { - name: string, -} - -export type { Plugin } diff --git a/packages/gatsby/src/redux/nodes.ts b/packages/gatsby/src/redux/nodes.ts index 1c68274b3f17f..03c518d824304 100644 --- a/packages/gatsby/src/redux/nodes.ts +++ b/packages/gatsby/src/redux/nodes.ts @@ -1,8 +1,11 @@ import { store } from "./" import { IGatsbyNode } from "./types" import { createPageDependency } from "./actions/add-page-dependency" +import { IDbQueryElemMatch } from "../db/common/query" export type FilterCacheKey = string +export type FilterCache = Map> +export type FiltersCache = Map /** * Get all nodes from redux store. 
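To make the new `FilterCache`/`FiltersCache` aliases concrete, here is an illustration (not code from this PR) of the nested shape they describe and how a lookup uses it; the cache key and nodes are made up:

```js
// Stand-ins for IGatsbyNode objects
const nodeA = { id: `a` }
const nodeB = { id: `b` }
const nodeC = { id: `c` }

// FiltersCache: Map<FilterCacheKey, FilterCache>
// FilterCache:  Map<string | number | boolean, Set<IGatsbyNode>>
const filtersCache = new Map()
const filterCache = new Map([
  [false, new Set([nodeA, nodeB])], // nodes whose filtered value is `false`
  [true, new Set([nodeC])], // nodes whose filtered value is `true`
])
filtersCache.set(`MarkdownRemark/frontmatter,published/$eq`, filterCache)

// A `published: { eq: false }` query then becomes a single lookup:
const matches = filtersCache
  .get(`MarkdownRemark/frontmatter,published/$eq`)
  .get(false) // -> Set { nodeA, nodeB }
```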
@@ -160,59 +163,178 @@ export const ensureIndexByTypedChain = ( cacheKey: FilterCacheKey, chain: string[], nodeTypeNames: string[], - typedKeyValueIndexes: Map< - FilterCacheKey, - Map> - > + filtersCache: FiltersCache ): void => { - if (typedKeyValueIndexes.has(cacheKey)) { + const state = store.getState() + const resolvedNodesCache = state.resolvedNodesCache + + const filterCache: FilterCache = new Map() + filtersCache.set(cacheKey, filterCache) + + // We cache the subsets of nodes by type, but only one type. So if searching + // through one node type we can prevent a search through all nodes, otherwise + // it's probably faster to loop through all nodes. Perhaps. Maybe. + + if (nodeTypeNames.length === 1) { + getNodesByType(nodeTypeNames[0]).forEach(node => { + addNodeToFilterCache(node, chain, filterCache, resolvedNodesCache) + }) + } else { + // Here we must first filter for the node type + // This loop is expensive at scale (!) + state.nodes.forEach(node => { + if (!nodeTypeNames.includes(node.internal.type)) { + return + } + + addNodeToFilterCache(node, chain, filterCache, resolvedNodesCache) + }) + } +} + +function addNodeToFilterCache( + node: IGatsbyNode, + chain: Array, + filterCache: FilterCache, + resolvedNodesCache, + valueOffset: any = node +): void { + // There can be a filter that targets `__gatsby_resolved` so fix that first + if (!node.__gatsby_resolved) { + const typeName = node.internal.type + const resolvedNodes = resolvedNodesCache.get(typeName) + node.__gatsby_resolved = resolvedNodes?.get(node.id) + } + + // - for plain query, valueOffset === node + // - for elemMatch, valueOffset is sub-tree of the node to continue matching + let v = valueOffset as any + let i = 0 + while (i < chain.length && v) { + const nextProp = chain[i++] + v = v[nextProp] + } + + if ( + (typeof v !== `string` && + typeof v !== `number` && + typeof v !== `boolean`) || + i !== chain.length + ) { + // Not sure whether this is supposed to happen, but this means that either + // - The node chain ended with `undefined`, or + // - The node chain ended in something other than a primitive, or + // - A part in the chain in the object was not an object return } - const { nodes, resolvedNodesCache } = store.getState() + let set = filterCache.get(v) + if (!set) { + set = new Set() + filterCache.set(v, set) + } + set.add(node) +} - const byKeyValue = new Map>() - typedKeyValueIndexes.set(cacheKey, byKeyValue) +export const ensureIndexByElemMatch = ( + cacheKey: FilterCacheKey, + filter: IDbQueryElemMatch, + nodeTypeNames: Array, + filtersCache: FiltersCache +): void => { + // Given an elemMatch filter, generate the cache that contains all nodes that + // matches a given value for that sub-query + + const state = store.getState() + const { resolvedNodesCache } = state + + const filterCache: FilterCache = new Map() + filtersCache.set(cacheKey, filterCache) + + if (nodeTypeNames.length === 1) { + getNodesByType(nodeTypeNames[0]).forEach(node => { + addNodeToBucketWithElemMatch( + node, + node, + filter, + filterCache, + resolvedNodesCache + ) + }) + } else { + // Expensive at scale + state.nodes.forEach(node => { + if (!nodeTypeNames.includes(node.internal.type)) { + return + } + + addNodeToBucketWithElemMatch( + node, + node, + filter, + filterCache, + resolvedNodesCache + ) + }) + } +} - nodes.forEach(node => { - if (!nodeTypeNames.includes(node.internal.type)) { - return - } +function addNodeToBucketWithElemMatch( + node: IGatsbyNode, + valueAtCurrentStep: any, // Arbitrary step on the path inside the 
node + filter: IDbQueryElemMatch, + filterCache: FilterCache, + resolvedNodesCache +): void { + // There can be a filter that targets `__gatsby_resolved` so fix that first + if (!node.__gatsby_resolved) { + const typeName = node.internal.type + const resolvedNodes = resolvedNodesCache.get(typeName) + node.__gatsby_resolved = resolvedNodes?.get(node.id) + } - // There can be a filter that targets `__gatsby_resolved` so fix that first - if (!node.__gatsby_resolved) { - const typeName = node.internal.type - const resolvedNodes = resolvedNodesCache.get(typeName) - node.__gatsby_resolved = resolvedNodes?.get(node.id) - } + const { path, nestedQuery } = filter - let v = node as any - let i = 0 - while (i < chain.length && v) { - const nextProp = chain[i++] - v = v[nextProp] - } + // Find the value to apply elemMatch to + let i = 0 + while (i < path.length && valueAtCurrentStep) { + const nextProp = path[i++] + valueAtCurrentStep = valueAtCurrentStep[nextProp] + } - if ( - (typeof v !== `string` && - typeof v !== `number` && - typeof v !== `boolean`) || - i !== chain.length - ) { - // Not sure whether this is supposed to happen, but this means that either - // - The node chain ended with `undefined`, or - // - The node chain ended in something other than a primitive, or - // - A part in the chain in the object was not an object - return - } + if (path.length !== i) { + // Found undefined before the end of the path, so let Sift take over + return + } - let set = byKeyValue.get(v) - if (!set) { - set = new Set() - byKeyValue.set(v, set) - } - set.add(node) - }) + // `v` should now be an elemMatch target, probably an array (but maybe not) + + if (Array.isArray(valueAtCurrentStep)) { + // Note: We need to check all elements because the node may need to be added + // to multiple buckets (`{a:[{b:3},{b:4}]}`, for `a.elemMatch.b/eq` that + // node ends up in buckets for value 3 and 4. This may lead to duplicate + // work when elements resolve to the same value, but that can't be helped. + valueAtCurrentStep.forEach(elem => { + if (nestedQuery.type === `elemMatch`) { + addNodeToBucketWithElemMatch( + node, + elem, + nestedQuery, + filterCache, + resolvedNodesCache + ) + } else { + // Now take same route as non-elemMatch filters would take + addNodeToFilterCache( + node, + nestedQuery.path, + filterCache, + resolvedNodesCache, + elem + ) + } + }) + } } /** @@ -228,14 +350,11 @@ export const ensureIndexByTypedChain = ( * The only exception is `id`, since internally there can be at most one node * per `id` so there's a minor optimization for that (no need for Sets). 
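A worked illustration (not PR code) of the multi-bucket comment in `addNodeToBucketWithElemMatch`: a single node whose array elements resolve to different values ends up in several buckets of the same `FilterCache`. The type in the cache key is hypothetical.

```js
const node = { id: `n1`, a: [{ b: 3 }, { b: 4 }] }

// After ensureIndexByElemMatch for the filter `a: { elemMatch: { b: { eq: ... } } }`,
// the FilterCache stored under `SomeType/a,elemMatch,b/$eq` would conceptually hold:
const filterCache = new Map([
  [3, new Set([node])], // contributed by the element { b: 3 }
  [4, new Set([node])], // contributed by the element { b: 4 }
])

// Both of these lookups therefore find the same node:
filterCache.get(3) // -> Set { node }
filterCache.get(4) // -> Set { node }
```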
*/ -export const getNodesByTypedChain = ( +export const getFilterCacheByTypedChain = ( cacheKey: FilterCacheKey, value: boolean | number | string, - typedKeyValueIndexes: Map< - FilterCacheKey, - Map> - > + filtersCache: FiltersCache ): Set | undefined => { - const byTypedKey = typedKeyValueIndexes?.get(cacheKey) + const byTypedKey = filtersCache?.get(cacheKey) return byTypedKey?.get(value) } diff --git a/packages/gatsby/src/redux/persist.ts b/packages/gatsby/src/redux/persist.ts index 7bc4a0cba0c0c..68a049a0b7fdb 100644 --- a/packages/gatsby/src/redux/persist.ts +++ b/packages/gatsby/src/redux/persist.ts @@ -1,11 +1,12 @@ import path from "path" +import os from "os" import v8 from "v8" import { existsSync, mkdtempSync, + moveSync, // Note: moveSync over renameSync because /tmp may be on other mount readFileSync, removeSync, - renameSync, writeFileSync, } from "fs-extra" import { IGatsbyNode, ICachedReduxState } from "./types" @@ -130,7 +131,7 @@ function safelyRenameToBak(reduxCacheFolder: string): string { ++suffixCounter bakName = reduxCacheFolder + tmpSuffix + suffixCounter } - renameSync(reduxCacheFolder, bakName) + moveSync(reduxCacheFolder, bakName) return bakName } @@ -139,7 +140,7 @@ export function writeToCache(contents: ICachedReduxState): void { // Note: this should be a transactional operation. So work in a tmp dir and // make sure the cache cannot be left in a corruptable state due to errors. - const tmpDir = mkdtempSync(`reduxcache`) // linux / windows + const tmpDir = mkdtempSync(path.join(os.tmpdir(), `reduxcache`)) // linux / windows prepareCacheFolder(tmpDir, contents) @@ -156,7 +157,7 @@ export function writeToCache(contents: ICachedReduxState): void { } // The redux cache folder should now not exist so we can rename our tmp to it - renameSync(tmpDir, reduxCacheFolder) + moveSync(tmpDir, reduxCacheFolder) // Now try to yolorimraf the old cache folder try { diff --git a/packages/gatsby/src/redux/reducers/__tests__/redirects.js b/packages/gatsby/src/redux/reducers/__tests__/redirects.ts similarity index 86% rename from packages/gatsby/src/redux/reducers/__tests__/redirects.js rename to packages/gatsby/src/redux/reducers/__tests__/redirects.ts index 605500d55e604..76323bf7035fc 100644 --- a/packages/gatsby/src/redux/reducers/__tests__/redirects.js +++ b/packages/gatsby/src/redux/reducers/__tests__/redirects.ts @@ -1,9 +1,11 @@ +import { ICreateRedirectAction, IRedirect } from "../../types" + let reducer describe(`redirects`, () => { beforeEach(() => { jest.isolateModules(() => { - reducer = require(`../redirects`) + reducer = require(`../redirects`).redirectsReducer }) }) it(`lets you redirect to an internal url`, () => { @@ -15,7 +17,7 @@ describe(`redirects`, () => { }, } - let state = reducer(undefined, action) + const state = reducer(undefined, action) expect(state).toEqual([ { @@ -34,7 +36,7 @@ describe(`redirects`, () => { }, } - let state = reducer(undefined, action) + const state = reducer(undefined, action) expect(state).toEqual([ { @@ -73,7 +75,10 @@ describe(`redirects`, () => { }) it(`prevents duplicate redirects`, () => { - function createRedirect(fromPath, toPath) { + function createRedirect( + fromPath: string, + toPath: string + ): ICreateRedirectAction { return { type: `CREATE_REDIRECT`, payload: { fromPath, toPath }, @@ -92,7 +97,7 @@ describe(`redirects`, () => { }) it(`allows multiple redirects with same "fromPath" but different options`, () => { - function createRedirect(redirect) { + function createRedirect(redirect: IRedirect): ICreateRedirectAction 
{ return { type: `CREATE_REDIRECT`, payload: redirect, diff --git a/packages/gatsby/src/redux/reducers/index.js b/packages/gatsby/src/redux/reducers/index.js index 67067487f1b42..86a4e2509a487 100644 --- a/packages/gatsby/src/redux/reducers/index.js +++ b/packages/gatsby/src/redux/reducers/index.js @@ -1,5 +1,6 @@ const reduxNodes = require(`./nodes`) const lokiNodes = require(`../../db/loki/nodes`).reducer +import { redirectsReducer } from "./redirects" const backend = process.env.GATSBY_DB_NODES || `redux` @@ -59,7 +60,7 @@ module.exports = { jobsV2: require(`./jobsv2`), webpack: require(`./webpack`), webpackCompilationHash: require(`./webpack-compilation-hash`), - redirects: require(`./redirects`), + redirects: redirectsReducer, babelrc: require(`./babelrc`), schemaCustomization: require(`./schema-customization`), themes: require(`./themes`), diff --git a/packages/gatsby/src/redux/reducers/redirects.js b/packages/gatsby/src/redux/reducers/redirects.js deleted file mode 100644 index ed8b072dc98fd..0000000000000 --- a/packages/gatsby/src/redux/reducers/redirects.js +++ /dev/null @@ -1,44 +0,0 @@ -const _ = require(`lodash`) - -const redirects = new Map() - -function exists(newRedirect) { - if (!redirects.has(newRedirect.fromPath)) { - return false - } - - return redirects - .get(newRedirect.fromPath) - .some(redirect => _.isEqual(redirect, newRedirect)) -} - -function add(redirect) { - let samePathRedirects = redirects.get(redirect.fromPath) - - if (!samePathRedirects) { - samePathRedirects = [] - redirects.set(redirect.fromPath, samePathRedirects) - } - - samePathRedirects.push(redirect) -} - -module.exports = (state = [], action) => { - switch (action.type) { - case `CREATE_REDIRECT`: { - const redirect = action.payload - - // Add redirect only if it wasn't yet added to prevent duplicates - if (!exists(redirect)) { - add(redirect) - - state.push(redirect) - } - - return state - } - - default: - return state - } -} diff --git a/packages/gatsby/src/redux/reducers/redirects.ts b/packages/gatsby/src/redux/reducers/redirects.ts new file mode 100644 index 0000000000000..716d0ebe3ed45 --- /dev/null +++ b/packages/gatsby/src/redux/reducers/redirects.ts @@ -0,0 +1,46 @@ +import _ from "lodash" +import { IGatsbyState, IRedirect, ICreateRedirectAction } from "../types" + +const redirects = new Map() + +function exists(newRedirect: IRedirect): boolean { + const fromPathRedirects = redirects.get(newRedirect.fromPath) + + if (!fromPathRedirects) return false + + return fromPathRedirects.some(redirect => _.isEqual(redirect, newRedirect)) +} + +function add(redirect: IRedirect): void { + let samePathRedirects = redirects.get(redirect.fromPath) + + if (!samePathRedirects) { + samePathRedirects = [] + redirects.set(redirect.fromPath, samePathRedirects) + } + + samePathRedirects.push(redirect) +} + +export const redirectsReducer = ( + state: IGatsbyState["redirects"] = [], + action: ICreateRedirectAction +): IGatsbyState["redirects"] => { + switch (action.type) { + case `CREATE_REDIRECT`: { + const redirect = action.payload + + // Add redirect only if it wasn't yet added to prevent duplicates + if (!exists(redirect)) { + add(redirect) + + state.push(redirect) + } + + return state + } + + default: + return state + } +} diff --git a/packages/gatsby/src/redux/run-sift.js b/packages/gatsby/src/redux/run-sift.js index aba8ae2936406..bd511eaf7b2cd 100644 --- a/packages/gatsby/src/redux/run-sift.js +++ b/packages/gatsby/src/redux/run-sift.js @@ -15,13 +15,14 @@ const { } = require(`../db/common/query`) const { 
ensureIndexByTypedChain, - getNodesByTypedChain, + ensureIndexByElemMatch, + getFilterCacheByTypedChain, addResolvedNodes, getNode: siftGetNode, } = require(`./nodes`) /** - * Creates a key for the filterCache + * Creates a key for one filterCache inside FiltersCache * * @param {Array} typeNames * @param {DbQuery} filter @@ -38,6 +39,9 @@ const createTypedFilterCacheKey = (typeNames, filter) => { if (f.type === `elemMatch`) { let q /*: IDbQueryElemMatch*/ = f f = q.nestedQuery + // Make distinction between filtering `a.elemMatch.b.eq` and `a.b.eq` + // In practice this is unlikely to be an issue, but it might + paths.push(`elemMatch`) } else { let q /*: IDbQueryQuery*/ = f comparator = q.query.comparator @@ -46,7 +50,7 @@ const createTypedFilterCacheKey = (typeNames, filter) => { } // Note: the separators (`,` and `/`) are arbitrary but must be different - return typeNames.join(`,`) + `/` + comparator + `/` + paths.join(`,`) + return typeNames.join(`,`) + `/` + paths.join(`,`) + `/` + comparator } ///////////////////////////////////////////////////////////////////// @@ -135,19 +139,11 @@ function handleMany(siftArgs, nodes) { * * @param {Array} filters Resolved. (Should be checked by caller to exist) * @param {Array} nodeTypeNames - * @param {Map>>} typedKeyValueIndexes + * @param {FiltersCache} filtersCache * @returns {Array | undefined} */ -const runFlatFiltersWithoutSift = ( - filters, - nodeTypeNames, - typedKeyValueIndexes -) => { - const caches = getBucketsForFilters( - filters, - nodeTypeNames, - typedKeyValueIndexes - ) +const runFiltersWithoutSift = (filters, nodeTypeNames, filtersCache) => { + const caches = getBucketsForFilters(filters, nodeTypeNames, filtersCache) if (!caches) { // Let Sift take over as fallback @@ -179,55 +175,155 @@ const runFlatFiltersWithoutSift = ( /** * @param {Array} filters * @param {Array} nodeTypeNames - * @param {Map>>} typedKeyValueIndexes + * @param {FiltersCache} filtersCache * @returns {Array> | undefined} Undefined means at least one * cache was not found. Must fallback to sift. */ -const getBucketsForFilters = (filters, nodeTypeNames, typedKeyValueIndexes) => { - const caches /*: Array>>*/ = [] +const getBucketsForFilters = (filters, nodeTypeNames, filtersCache) => { + const filterCaches /*: Array*/ = [] // Fail fast while trying to create and get the value-cache for each path let every = filters.every((filter /*: DbQuery*/) => { - let { - path: chain, - query: { value: targetValue }, - } = filter - let cacheKey = createTypedFilterCacheKey(nodeTypeNames, filter) + if (filter.type === `query`) { + // (Let TS warn us if a new query type gets added) + const q /*: IDbQueryQuery */ = filter + return getBucketsForQueryFilter( + cacheKey, + q, + nodeTypeNames, + filtersCache, + filterCaches + ) + } else { + // (Let TS warn us if a new query type gets added) + const q /*: IDbQueryElemMatch*/ = filter + return collectBucketForElemMatch( + cacheKey, + q, + nodeTypeNames, + filtersCache, + filterCaches + ) + } + }) - ensureIndexByTypedChain( - cacheKey, - chain, - nodeTypeNames, - typedKeyValueIndexes - ) + if (every) { + return filterCaches + } - const nodesByKeyValue = getNodesByTypedChain( - cacheKey, - targetValue, - typedKeyValueIndexes - ) + // "failed at least one" + return undefined +} + +/** + * Fetch all buckets for given query filter. That means it's not elemMatch. 
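To show what the reordered key format and the new `elemMatch` path segment buy us, a small sketch (not PR code) of the keys `createTypedFilterCacheKey` now produces; the type and field names are hypothetical:

```js
const typeNames = [`MarkdownRemark`]

// Plain filter: { frontmatter: { published: { eq: false } } }
const plainKey =
  typeNames.join(`,`) + `/` + [`frontmatter`, `published`].join(`,`) + `/` + `$eq`
// -> "MarkdownRemark/frontmatter,published/$eq"

// elemMatch filter: { tags: { elemMatch: { name: { eq: `x` } } } }
// The literal `elemMatch` segment keeps this key from colliding with a
// plain `tags.name` filter on the same type.
const elemMatchKey =
  typeNames.join(`,`) + `/` + [`tags`, `elemMatch`, `name`].join(`,`) + `/` + `$eq`
// -> "MarkdownRemark/tags,elemMatch,name/$eq"
```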
+ * + * @param {FilterCacheKey} cacheKey + * @param {IDbQueryQuery} filter + * @param {Array} nodeTypeNames + * @param {FiltersCache} filtersCache + * @param {Array} filterCaches + * @returns {boolean} false means soft fail, filter must go through Sift + */ +const getBucketsForQueryFilter = ( + cacheKey, + filter, + nodeTypeNames, + filtersCache, + filterCaches +) => { + let { + path: chain, + query: { value: targetValue }, + } = filter - // If we couldn't find the needle then maybe sift can, for example if the - // schema contained a proxy; `slug: String @proxy(from: "slugInternal")` - // There are also cases (and tests) where id exists with a different type - if (!nodesByKeyValue) { - return false + if (!filtersCache.has(cacheKey)) { + ensureIndexByTypedChain(cacheKey, chain, nodeTypeNames, filtersCache) + } + + const filterCache = getFilterCacheByTypedChain( + cacheKey, + targetValue, + filtersCache + ) + + // If we couldn't find the needle then maybe sift can, for example if the + // schema contained a proxy; `slug: String @proxy(from: "slugInternal")` + // There are also cases (and tests) where id exists with a different type + if (!filterCache) { + return false + } + + // In all other cases this must be a non-empty Set because the indexing + // mechanism does not create a Set unless there's a IGatsbyNode for it + filterCaches.push(filterCache) + + return true +} + +/** + * @param {string} typedKey + * @param {IDbQueryElemMatch} filter + * @param {Array} nodeTypeNames + * @param {FiltersCache} filtersCache + * @param {Array} filterCaches Matching node sets are put in this array + */ +const collectBucketForElemMatch = ( + typedKey, + filter, + nodeTypeNames, + filtersCache, + filterCaches +) => { + // Get comparator and target value for this elemMatch + let comparator = `` + let targetValue = null + let f /*: DbQuery*/ = filter + while (f) { + if (f.type === `elemMatch`) { + const q /*: IDbQueryElemMatch */ = f + f = q.nestedQuery + } else { + const q /*: IDbQueryQuery */ = f + comparator = q.query.comparator + targetValue = q.query.value + break } + } + + if ( + ![ + `$eq`, + // "$lte", + // "$gte", + ].includes(comparator) + ) { + return false + } - // In all other cases this must be a non-empty Set because the indexing - // mechanism does not create a Set unless there's a IGatsbyNode for it - caches.push(nodesByKeyValue) + if (!filtersCache.has(typedKey)) { + ensureIndexByElemMatch(typedKey, filter, nodeTypeNames, filtersCache) + } - return true - }) + const nodesByKeyValue /*: Set | undefined*/ = getFilterCacheByTypedChain( + typedKey, + targetValue, + filtersCache + ) - if (every) { - return caches + // If we couldn't find the needle then maybe sift can, for example if the + // schema contained a proxy; `slug: String @proxy(from: "slugInternal")` + // There are also cases (and tests) where id exists with a different type + if (!nodesByKeyValue) { + return false } - // "failed at least one" - return undefined + // In all other cases this must be a non-empty Set because the indexing + // mechanism does not create a Set unless there's a IGatsbyNode for it + filterCaches.push(nodesByKeyValue) + + return true } /** @@ -238,12 +334,12 @@ const getBucketsForFilters = (filters, nodeTypeNames, typedKeyValueIndexes) => { * @property {boolean} args.firstOnly true if you want to return only the first * result found. This will return a collection of size 1. 
Not a single element * @property {{filter?: Object, sort?: Object} | undefined} args.queryArgs - * @property {undefined | Map>>} args.typedKeyValueIndexes - * May be undefined. A cache of indexes where you can look up Nodes grouped - * by a key: `types.join(',')+'/'+filterPath.join('+')`, which yields a Map - * which holds a Set of Nodes for the value that the filter is trying to eq - * against. If the property is `id` then there is no Set, it's just the IGatsbyNode. - * This object lives in query/query-runner.js and is passed down runQuery + * @property {undefined | null | FiltersCache} args.filtersCache May be null or + * undefined. A cache of indexes where you can look up Nodes grouped by a + * FilterCacheKey, which yields a Map which holds a Set of Nodes for the value + * that the filter is trying to query against. + * This object lives in query/query-runner.js and is passed down runQuery. + * If it is undefined or null, do not consider to use a fast index at all. * @returns Collection of results. Collection will be limited to 1 * if `firstOnly` is true */ @@ -253,7 +349,7 @@ const runFilterAndSort = (args: Object) => { resolvedFields = {}, firstOnly = false, nodeTypeNames, - typedKeyValueIndexes, + filtersCache, stats, } = args @@ -261,7 +357,7 @@ const runFilterAndSort = (args: Object) => { filter, firstOnly, nodeTypeNames, - typedKeyValueIndexes, + filtersCache, resolvedFields, stats ) @@ -279,7 +375,7 @@ exports.runSift = runFilterAndSort * @param {Array | undefined} filterFields * @param {boolean} firstOnly * @param {Array} nodeTypeNames - * @param {undefined | Map>>} typedKeyValueIndexes + * @param {undefined | null | FiltersCache} filtersCache * @param resolvedFields * @returns {Array | undefined} Collection of results. Collection * will be limited to 1 if `firstOnly` is true @@ -288,7 +384,7 @@ const applyFilters = ( filterFields, firstOnly, nodeTypeNames, - typedKeyValueIndexes, + filtersCache, resolvedFields, stats ) => { @@ -314,7 +410,7 @@ const applyFilters = ( } } - const result = filterWithoutSift(filters, nodeTypeNames, typedKeyValueIndexes) + const result = filterWithoutSift(filters, nodeTypeNames, filtersCache) if (result) { if (stats) { stats.totalIndexHits++ @@ -354,22 +450,37 @@ const filterToStats = ( * * @param {Array} filters Resolved. (Should be checked by caller to exist) * @param {Array} nodeTypeNames - * @param {Map>>} typedKeyValueIndexes - * @returns {Array|undefined} Collection of results + * @param {undefined | null | FiltersCache} filtersCache + * @returns {Array | undefined} Collection of results */ -const filterWithoutSift = (filters, nodeTypeNames, typedKeyValueIndexes) => { +const filterWithoutSift = (filters, nodeTypeNames, filtersCache) => { // This can also be `$ne`, `$in` or any other grapqhl comparison op + + if (!filtersCache) { + // If no filter cache is passed on, explicitly don't use one + return undefined + } + + if (filters.length === 0) { + // If no filters are given, go through Sift. This does not appear to be + // slower than s + // hortcutting it here. + return undefined + } + if ( - !typedKeyValueIndexes || - filters.length === 0 || // TODO: we should special case this filters.some( - filter => filter.type === `elemMatch` || filter.query.comparator !== `$eq` + filter => + filter.type === `query` && // enabled + // filter.type === `elemMatch` || // disabled + ![`$eq`].includes(filter.query.comparator) ) ) { + // If there's a filter with non-supported op, stop now. 
return undefined } - return runFlatFiltersWithoutSift(filters, nodeTypeNames, typedKeyValueIndexes) + return runFiltersWithoutSift(filters, nodeTypeNames, filtersCache) } // Not a public API diff --git a/packages/gatsby/src/redux/types.ts b/packages/gatsby/src/redux/types.ts index dce585b7f6970..b871c5bd76f0b 100644 --- a/packages/gatsby/src/redux/types.ts +++ b/packages/gatsby/src/redux/types.ts @@ -1,11 +1,21 @@ import { IProgram } from "../commands/types" -import { GraphQLSchema } from "graphql" +import { GraphQLFieldExtensionDefinition } from "../schema/extensions" +import { DocumentNode, GraphQLSchema } from "graphql" import { SchemaComposer } from "graphql-compose" type SystemPath = string type Identifier = string type StructuredLog = any // TODO this should come from structured log interface +export interface IRedirect { + fromPath: string + toPath: string + isPermanent?: boolean + redirectInBrowser?: boolean + // Users can add anything to this createRedirect API + [key: string]: any +} + export enum ProgramStatus { BOOTSTRAP_FINISHED = `BOOTSTRAP_FINISHED`, BOOTSTRAP_QUERY_RUNNING_FINISHED = `BOOTSTRAP_QUERY_RUNNING_FINISHED`, @@ -62,6 +72,15 @@ export interface IGatsbyNode { [key: string]: unknown } +export interface IGatsbyPlugin { + name: string + version: string +} + +export interface IGatsbyPluginContext { + [key: string]: (...args: any[]) => any +} + type GatsbyNodes = Map export interface IGatsbyState { @@ -140,7 +159,7 @@ export interface IGatsbyState { } webpack: any // TODO This should be the output from ./utils/webpack.config.js webpackCompilationHash: string - redirects: any[] // TODO + redirects: IRedirect[] babelrc: { stages: { develop: any // TODO @@ -214,6 +233,10 @@ export type ActionsUnion = | IQueryExtractionBabelErrorAction | ISetProgramStatusAction | IPageQueryRunAction + | IAddThirdPartySchema + | ICreateTypes + | ICreateFieldExtension + | IPrintTypeDefinitions export interface ICreatePageDependencyAction { type: `CREATE_COMPONENT_DEPENDENCY` @@ -242,7 +265,7 @@ export interface IReplaceComponentQueryAction { export interface IReplaceStaticQueryAction { type: `REPLACE_STATIC_QUERY` - plugin: Plugin | null | undefined + plugin: IGatsbyPlugin | null | undefined payload: { name: string componentPath: string @@ -254,28 +277,28 @@ export interface IReplaceStaticQueryAction { export interface IQueryExtractedAction { type: `QUERY_EXTRACTED` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string; query: string } } export interface IQueryExtractionGraphQLErrorAction { type: `QUERY_EXTRACTION_GRAPHQL_ERROR` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string; error: string } } export interface IQueryExtractedBabelSuccessAction { type: `QUERY_EXTRACTION_BABEL_SUCCESS` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string } } export interface IQueryExtractionBabelErrorAction { type: `QUERY_EXTRACTION_BABEL_ERROR` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { componentPath: string @@ -285,21 +308,71 @@ export interface IQueryExtractionBabelErrorAction { export interface ISetProgramStatusAction { type: `SET_PROGRAM_STATUS` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: ProgramStatus } export interface IPageQueryRunAction { type: `PAGE_QUERY_RUN` - plugin: Plugin + plugin: IGatsbyPlugin traceId: string | undefined payload: { path: string; componentPath: string; isPage: 
boolean } } export interface IRemoveStaleJobAction { type: `REMOVE_STALE_JOB_V2` - plugin: Plugin + plugin: IGatsbyPlugin traceId?: string payload: { contentDigest: string } } + +export interface IAddThirdPartySchema { + type: `ADD_THIRD_PARTY_SCHEMA` + plugin: IGatsbyPlugin + traceId?: string + payload: GraphQLSchema +} + +export interface ICreateTypes { + type: `CREATE_TYPES` + plugin: IGatsbyPlugin + traceId?: string + payload: DocumentNode | DocumentNode[] +} + +export interface ICreateFieldExtension { + type: `CREATE_FIELD_EXTENSION` + plugin: IGatsbyPlugin + traceId?: string + payload: { + name: string + extension: GraphQLFieldExtensionDefinition + } +} + +export interface IPrintTypeDefinitions { + type: `PRINT_SCHEMA_REQUESTED` + plugin: IGatsbyPlugin + traceId?: string + payload: { + path?: string + include?: { types?: Array; plugins?: Array } + exclude?: { types?: Array; plugins?: Array } + withFieldTypes?: boolean + } +} + +export interface ICreateResolverContext { + type: `CREATE_RESOLVER_CONTEXT` + plugin: IGatsbyPlugin + traceId?: string + payload: + | IGatsbyPluginContext + | { [camelCasedPluginNameWithoutPrefix: string]: IGatsbyPluginContext } +} + +export interface ICreateRedirectAction { + type: `CREATE_REDIRECT` + payload: IRedirect +} diff --git a/packages/gatsby/src/schema/__tests__/node-model.js b/packages/gatsby/src/schema/__tests__/node-model.js index 71378902f45bc..cb736bab5680f 100644 --- a/packages/gatsby/src/schema/__tests__/node-model.js +++ b/packages/gatsby/src/schema/__tests__/node-model.js @@ -286,9 +286,9 @@ describe(`NodeModel`, () => { }) }) ;[ - { desc: `with cache`, cb: () => new Map() }, // Avoids sift for flat filters + { desc: `with cache`, cb: () /*:FiltersCache*/ => new Map() }, // Avoids sift for flat filters { desc: `no cache`, cb: () => null }, // Always goes through sift - ].forEach(({ desc, cb: createIndexCache }) => { + ].forEach(({ desc, cb: createFiltersCache }) => { describe(`runQuery [${desc}]`, () => { it(`returns first result only`, async () => { const type = `Post` @@ -296,7 +296,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -311,7 +311,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -328,7 +328,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.runQuery( { query, @@ -354,7 +354,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.withContext({ path: `/` }).runQuery({ query, firstOnly, @@ -377,7 +377,7 @@ describe(`NodeModel`, () => { filter: { frontmatter: { published: { eq: false } } }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.runQuery( { query, @@ 
-397,7 +397,7 @@ describe(`NodeModel`, () => { const type = `AllFiles` const query = {} const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = nodeModel.runQuery({ query, firstOnly, @@ -412,7 +412,7 @@ describe(`NodeModel`, () => { const type = `TeamMember` const query = { name: { ne: null } } const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -429,7 +429,7 @@ describe(`NodeModel`, () => { }, } const firstOnly = false - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -448,7 +448,7 @@ describe(`NodeModel`, () => { }, } const firstOnly = true - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery({ query, firstOnly, @@ -551,11 +551,11 @@ describe(`NodeModel`, () => { }) }) ;[ - { desc: `with cache`, cb: () => new Map() }, // Avoids sift for flat filters + { desc: `with cache`, cb: () /*:FiltersCache*/ => new Map() }, // Avoids sift for flat filters { desc: `no cache`, cb: () => null }, // Always goes through sift - ].forEach(({ desc, cb: createIndexCache }) => { + ].forEach(({ desc, cb: createFiltersCache }) => { it(`[${desc}] should not resolve prepared nodes more than once`, async () => { - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.runQuery( { query: { filter: { betterTitle: { eq: `foo` } } }, @@ -566,7 +566,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(0) - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.runQuery( { query: { filter: { betterTitle: { eq: `foo` } } }, @@ -577,7 +577,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(0) - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.runQuery( { query: { @@ -590,7 +590,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(2) - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.runQuery( { query: { @@ -603,7 +603,7 @@ describe(`NodeModel`, () => { ) expect(resolveBetterTitleMock.mock.calls.length).toBe(2) expect(resolveOtherTitleMock.mock.calls.length).toBe(2) - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) await nodeModel.runQuery( { query: { @@ -619,7 +619,7 @@ describe(`NodeModel`, () => { }) it(`[${desc}] can filter by resolved fields`, async () => { - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery( { query: { @@ -764,12 +764,12 @@ describe(`NodeModel`, () => { }) }) ;[ - { desc: `with index cache`, cb: () => new Map() }, // Avoids sift - { desc: `no index cache`, cb: () => null }, // 
Requires sift - ].forEach(({ desc, cb: createIndexCache }) => { + { desc: `with cache`, cb: () => new Map() }, // Avoids sift + { desc: `no cache`, cb: () => null }, // Requires sift + ].forEach(({ desc, cb: createFiltersCache }) => { describe(`[${desc}] Tracks nodes returned by queries`, () => { it(`Tracks objects when running query without filter`, async () => { - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery({ query: {}, type: schema.getType(`Test`), @@ -786,7 +786,7 @@ describe(`NodeModel`, () => { }) it(`Tracks objects when running query with filter`, async () => { - nodeModel.replaceTypeKeyValueCache(createIndexCache()) + nodeModel.replaceTypeKeyValueCache(createFiltersCache()) const result = await nodeModel.runQuery({ query: { filter: { diff --git a/packages/gatsby/src/schema/__tests__/run-query.js b/packages/gatsby/src/schema/__tests__/run-query.js index 36bed2afce7bb..cad1d109f06e7 100644 --- a/packages/gatsby/src/schema/__tests__/run-query.js +++ b/packages/gatsby/src/schema/__tests__/run-query.js @@ -1,7 +1,9 @@ const { runQuery: nodesQuery } = require(`../../db/nodes`) const { store } = require(`../../redux`) const { actions } = require(`../../redux/actions`) -require(`../../db/__tests__/fixtures/ensure-loki`)() + +// Note: loki does not match redux in certain edge cases in this file +const IS_LOKI = require(`../../db/__tests__/fixtures/ensure-loki`)() const makeNodes = () => [ { @@ -14,6 +16,8 @@ const makeNodes = () => [ hair: 1, date: `2006-07-22T22:39:53.000Z`, anArray: [1, 2, 3, 4], + strArray: `["testing", "serialization", "hacks"]`, + nullArray: [1, null, 3, 4], key: { withEmptyArray: [], }, @@ -35,9 +39,22 @@ const makeNodes = () => [ { aString: `some string`, aNumber: 2, anArray: [1, 2] }, ], boolean: true, + nil: `not null`, nestedRegex: { field: `har har`, }, + num_null_not: 1, + num_not_null: 1, + null_num_not: null, + null_not_num: null, + str_null_not: `x`, + str_not_null: `x`, + null_str_not: null, + null_not_str: null, + obj_null_not: { y: 5 }, + obj_not_null: { y: 5 }, + null_obj_not: null, + null_not_obj: null, }, { id: `1`, @@ -48,6 +65,8 @@ const makeNodes = () => [ float: 2.5, hair: 2, anArray: [1, 2, 5, 4], + strArray: `[5,6,7,8]`, + nullArray: [1, 3, 4], waxOnly: { foo: true, bar: { baz: true }, @@ -64,18 +83,29 @@ const makeNodes = () => [ circle: `happy`, }, boolean: false, - data: { - tags: [ + nil: null, + undef: undefined, + singleElem: { + things: [ { - tag: { - document: [ - { - data: { - tag: `Design System`, - }, - number: 3, - }, - ], + one: { + two: { + three: 123, + }, + }, + }, + { + one: { + five: { + three: 153, + }, + }, + }, + { + one: { + two: { + three: 404, + }, }, }, ], @@ -83,6 +113,20 @@ const makeNodes = () => [ nestedRegex: { field: ``, }, + strSecondOnly: `needle`, + boolSecondOnly: false, + num_null_not: null, + null_num_not: 1, + not_null_num: null, + not_num_null: 1, + str_null_not: null, + null_str_not: `x`, + not_null_str: null, + not_str_null: `x`, + obj_null_not: null, + null_obj_not: { y: 5 }, + not_null_obj: null, + not_obj_null: { y: 5 }, }, { id: `2`, @@ -132,6 +176,18 @@ const makeNodes = () => [ }, ], }, + num_not_null: null, + null_not_num: 1, + not_null_num: 1, + not_num_null: null, + str_not_null: null, + null_not_str: `x`, + not_null_str: `x`, + not_str_null: null, + obj_not_null: null, + null_not_obj: { y: 5 }, + not_null_obj: { y: 5 }, + not_obj_null: null, }, ] @@ -160,7 +216,7 @@ function resetDb(nodes) 
{ ) } -async function runQuery(queryArgs) { +async function runQuery(queryArgs, filtersCache) { const nodes = makeNodes() resetDb(nodes) const { sc, type: gqlType } = makeGqlType(nodes) @@ -170,362 +226,1343 @@ async function runQuery(queryArgs) { queryArgs, gqlComposer: sc, nodeTypeNames: [gqlType.name], + filtersCache, } return await nodesQuery(args) } -async function runFilter(filter) { - return await runQuery({ filter }) +async function runFilterOnCache(filter, filtersCache) { + return await runQuery({ filter }, filtersCache) } -describe(`Filter fields`, () => { - it(`handles eq operator`, async () => { - let result = await runFilter({ hair: { eq: 2 } }) - - expect(result.length).toEqual(1) - expect(result[0].hair).toEqual(2) +it(`should use the cache argument`, async () => { + // Loki does not use this system at all + if (IS_LOKI) return + + const filtersCache = new Map() + const result = await runFilterOnCache({ hair: { eq: 2 } }, filtersCache) + + // Validate answer + expect(result.length).toEqual(1) + expect(result[0].hair).toEqual(2) + + // Confirm cache is not ignored + expect(filtersCache.size === 1).toBe(true) + filtersCache.forEach((filterCache, cacheKey) => { + // This test will change when the composition of the FilterCache changes + // For now it should be a Map of values to Set of nodes + expect(filterCache instanceof Map).toBe(true) + // There ought to be at least one value mapped (probably more, shrug) + expect(filterCache.size >= 1).toBe(true) }) +}) - it(`handles eq operator with false value`, async () => { - let result = await runFilter({ boolean: { eq: false } }) +// Make sure to test fast filters (with cache) and Sift (without cache) +;[ + { desc: `without cache`, cb: () => null }, // Forces no cache, must use Sift + { desc: `with cache`, cb: () => new Map() }, +].forEach(({ desc, cb: createFiltersCache }) => { + async function runFilter(filter) { + return runFilterOnCache(filter, createFiltersCache()) + } - expect(result.length).toEqual(1) - expect(result[0].name).toEqual(`The Mad Wax`) - }) + describe(desc, () => { + describe(`Filter fields`, () => { + it(`handles eq operator with number value`, async () => { + let result = await runFilter({ hair: { eq: 2 } }) - it(`handles eq operator with 0`, async () => { - let result = await runFilter({ hair: { eq: 0 } }) + expect(result.length).toEqual(1) + expect(result[0].hair).toEqual(2) + }) - expect(result.length).toEqual(1) - expect(result[0].hair).toEqual(0) - }) + it(`handles eq operator with false value`, async () => { + let result = await runFilter({ boolean: { eq: false } }) - it(`handles ne operator`, async () => { - let result = await runFilter({ hair: { ne: 2 } }) + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + }) - expect(result.length).toEqual(2) - expect(result[0].hair).toEqual(1) - }) + it(`handles eq operator with 0`, async () => { + let result = await runFilter({ hair: { eq: 0 } }) - it(`handles ne: true operator`, async () => { - let result = await runFilter({ boolean: { ne: true } }) + expect(result.length).toEqual(1) + expect(result[0].hair).toEqual(0) + }) - expect(result.length).toEqual(2) - }) + it(`handles eq operator with null`, async () => { + let result = await runFilter({ nil: { eq: null } }) - it(`handles nested ne: true operator`, async () => { - let result = await runFilter({ waxOnly: { foo: { ne: true } } }) + // Also return nodes that do not have the property at all (NULL in db) + expect(result.length).toEqual(2) + }) - expect(result.length).toEqual(2) - 
}) + // graphql would never pass on `undefined` + // it(`handles eq operator with undefined`, async () => { + // let result = await runFilter({ undef: { eq: undefined } }) + // + // expect(result.length).toEqual(?) + // expect(result[0].hair).toEqual(?) + // }) - it(`handles deeply nested ne: true operator`, async () => { - let result = await runFilter({ - waxOnly: { bar: { baz: { ne: true } } }, - }) + it(`handles eq operator with serialized array value`, async () => { + let result = await runFilter({ strArray: { eq: `[5,6,7,8]` } }) - expect(result.length).toEqual(2) - }) + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + }) - it(`handles lt operator`, async () => { - let result = await runFilter({ hair: { lt: 2 } }) + it(`handles ne operator`, async () => { + if (IS_LOKI) return - expect(result.length).toEqual(2) - expect(result[0].hair).toEqual(1) - expect(result[1].hair).toEqual(0) - }) + let result = await runFilter({ hair: { ne: 2 } }) - it(`handles lte operator`, async () => { - let result = await runFilter({ hair: { lte: 1 } }) + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + }) - it(`handles gt operator`, async () => { - let result = await runFilter({ hair: { gt: 0 } }) + it(`handles ne: true operator`, async () => { + let result = await runFilter({ boolean: { ne: true } }) - expect(result.length).toEqual(2) - expect(result[0].hair).toEqual(1) - expect(result[1].hair).toEqual(2) - }) + expect(result.length).toEqual(2) + }) - it(`handles gte operator`, async () => { - let result = await runFilter({ hair: { gte: 1 } }) + it(`handles nested ne: true operator`, async () => { + let result = await runFilter({ waxOnly: { foo: { ne: true } } }) - expect(result.length).toEqual(2) - expect(result[0].hair).toEqual(1) - expect(result[1].hair).toEqual(2) - }) + expect(result.length).toEqual(2) + }) - it(`handles the regex operator`, async () => { - let result = await runFilter({ name: { regex: `/^the.*wax/i` } }) - expect(result.length).toEqual(2) - expect(result[0].name).toEqual(`The Mad Wax`) - }) + it(`handles ne operator with 0`, async () => { + let result = await runFilter({ hair: { ne: 0 } }) - it(`handles the nested regex operator`, async () => { - let result = await runFilter({ nestedRegex: { field: { regex: `/.*/` } } }) - expect(result.length).toEqual(2) - expect(result[0].id).toEqual(`0`) - expect(result[1].id).toEqual(`1`) - }) + expect(result.length).toEqual(2) + }) - it(`handles the in operator for strings`, async () => { - let result = await runFilter({ string: { in: [`b`, `c`] } }) - expect(result.length).toEqual(2) - expect(result[0].index).toEqual(1) - }) + it(`handles ne operator with null`, async () => { + if (IS_LOKI) return - it(`handles the in operator for ints`, async () => { - let result = await runFilter({ index: { in: [0, 2] } }) - expect(result.length).toEqual(2) - expect(result[0].index).toEqual(0) - expect(result[1].index).toEqual(2) - }) + let result = await runFilter({ nil: { ne: null } }) - it(`handles the in operator for floats`, async () => { - let result = await runFilter({ float: { in: [1.5, 2.5] } }) - expect(result.length).toEqual(2) - expect(result[0].index).toEqual(0) - expect(result[1].index).toEqual(1) - }) + // Should only return nodes that do have the property, not set to null + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Max`) + }) - it(`handles the in operator for booleans`, async () => { - let result = await runFilter({ boolean: { in: [true] } }) - expect(result.length).toEqual(1) // 2 - expect(result[0].index).toEqual(0) - // expect(result[1].index).toEqual(2) - }) + // graphql would 
never pass on `undefined` + // it(`handles ne operator with undefined`, async () => { + // let result = await runFilter({ undef: { ne: undefined } }) + // + // expect(result.length).toEqual(?) + // }) - it(`handles the in operator for booleans`, async () => { - let result = await runFilter({ boolean: { in: [true] } }) - expect(result.length).toEqual(1) // 2 - expect(result[0].index).toEqual(0) - // expect(result[1].index).toEqual(2) - }) + it(`handles deeply nested ne: true operator`, async () => { + let result = await runFilter({ + waxOnly: { bar: { baz: { ne: true } } }, + }) - it(`handles the in operator for array`, async () => { - let result = await runFilter({ anArray: { in: [5] } }) - expect(result.length).toEqual(1) - expect(result[0].name).toEqual(`The Mad Wax`) - }) + expect(result.length).toEqual(2) + }) - it(`handles the nested in operator for array of strings`, async () => { - let result = await runFilter({ frontmatter: { tags: { in: [`moo`] } } }) - expect(result).toHaveLength(1) - expect(result[0].name).toEqual(`The Mad Max`) - }) + it(`handles lt operator with number`, async () => { + let result = await runFilter({ hair: { lt: 2 } }) - it(`handles the elemMatch operator for array of objects`, async () => { - let result = await runFilter({ - data: { - tags: { - elemMatch: { - tag: { - document: { - elemMatch: { - data: { - tag: { eq: `Gatsby` }, - }, - }, - }, - }, - }, - }, - }, - }) + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(0) + }) - expect(result.length).toEqual(1) - expect(result[0].index).toEqual(2) - }) + it(`handles lt operator with null`, async () => { + if (IS_LOKI) return - it(`handles the elemMatch operator for array of objects (2)`, async () => { - let result = await runFilter({ - data: { - tags: { - elemMatch: { - tag: { - document: { - elemMatch: { - data: { - tag: { eq: `Design System` }, - }, - }, - }, - }, - }, - }, - }, - }) + let result = await runFilter({ nil: { lt: null } }) - expect(result.length).toEqual(2) - expect(result[0].index).toEqual(1) - expect(result[1].index).toEqual(2) - }) + // Nothing is lt null + expect(result).toEqual(null) + }) - it(`handles the elemMatch operator for array of objects (number)`, async () => { - let result = await runFilter({ - data: { - tags: { - elemMatch: { - tag: { - document: { - elemMatch: { - number: { lt: 4 }, - }, - }, - }, - }, - }, - }, - }) + it(`handles lte operator with number`, async () => { + let result = await runFilter({ hair: { lte: 1 } }) - expect(result.length).toEqual(1) - expect(result[0].index).toEqual(1) - }) + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(0) + }) - it(`handles the nin operator for array`, async () => { - let result = await runFilter({ anArray: { nin: [5] } }) + it(`handles lte operator with null`, async () => { + if (IS_LOKI) return - expect(result.length).toEqual(2) + let result = await runFilter({ nil: { lte: null } }) - result.forEach(edge => { - expect(edge.anArray).not.toEqual(expect.arrayContaining([5])) - }) - }) + // lte null matches null but no nodes without the property (NULL) + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + expect(result[0].nil).toEqual(null) + }) - it(`handles the nin operator for strings`, async () => { - let result = await runFilter({ string: { nin: [`b`, `c`] } }) + it(`handles gt operator with number`, async () => { + let result = await runFilter({ hair: { gt: 0 } }) - 
expect(result.length).toEqual(1) - result.forEach(edge => { - expect(edge.string).not.toEqual(`b`) - expect(edge.string).not.toEqual(`c`) - }) - }) + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(2) + }) - it(`handles the nin operator for ints`, async () => { - let result = await runFilter({ index: { nin: [0, 2] } }) + it(`handles gt operator with null`, async () => { + if (IS_LOKI) return - expect(result.length).toEqual(1) - result.forEach(edge => { - expect(edge.index).not.toEqual(0) - expect(edge.index).not.toEqual(2) - }) - }) + let result = await runFilter({ nil: { gt: null } }) - it(`handles the nin operator for floats`, async () => { - let result = await runFilter({ float: { nin: [1.5] } }) + // Nothing is gt null + expect(result).toEqual(null) + }) - expect(result.length).toEqual(2) - result.forEach(edge => { - expect(edge.float).not.toEqual(1.5) - }) - }) + it(`handles gte operator with number`, async () => { + let result = await runFilter({ hair: { gte: 1 } }) - it(`handles the nin operator for booleans`, async () => { - let result = await runFilter({ boolean: { nin: [true, null] } }) + expect(result.length).toEqual(2) + expect(result[0].hair).toEqual(1) + expect(result[1].hair).toEqual(2) + }) - expect(result.length).toEqual(1) - expect(result[0].boolean).toBe(false) - }) + it(`handles gte operator with null`, async () => { + if (IS_LOKI) return - it(`handles the glob operator`, async () => { - let result = await runFilter({ name: { glob: `*Wax` } }) + let result = await runFilter({ nil: { gte: null } }) - expect(result.length).toEqual(2) - expect(result[0].name).toEqual(`The Mad Wax`) - }) + // gte null matches null but no nodes without the property (NULL) + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + expect(result[0].nil).toEqual(null) + }) - it(`filters date fields`, async () => { - let result = await runFilter({ date: { ne: null } }) + it(`handles the regex operator without flags`, async () => { + let result = await runFilter({ name: { regex: `/^The.*Wax/` } }) - expect(result.length).toEqual(2) - expect(result[0].index).toEqual(0) - expect(result[1].index).toEqual(2) - }) + expect(result.length).toEqual(2) + expect(result[0].name).toEqual(`The Mad Wax`) + expect(result[1].name).toEqual(`The Mad Wax`) + }) - it(`handles the eq operator for array field values`, async () => { - const result = await runFilter({ anArray: { eq: 5 } }) + it(`handles the regex operator with i-flag`, async () => { + let result = await runFilter({ name: { regex: `/^the.*wax/i` } }) - expect(result.length).toBe(1) - expect(result[0].index).toBe(1) - }) + expect(result.length).toEqual(2) + expect(result[0].name).toEqual(`The Mad Wax`) + expect(result[1].name).toEqual(`The Mad Wax`) + }) - it(`handles the ne operator for array field values`, async () => { - const result = await runFilter({ anArray: { ne: 1 } }) + it(`handles the nested regex operator`, async () => { + let result = await runFilter({ + nestedRegex: { field: { regex: `/.*/` } }, + }) - expect(result.length).toBe(1) - expect(result[0].index).toBe(2) - }) -}) + expect(result.length).toEqual(2) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`1`) + }) -describe(`collection fields`, () => { - it(`sorts results`, async () => { - let result = await runQuery({ - limit: 10, - sort: { - fields: [`frontmatter.blue`], - order: [`desc`], - }, - }) + it(`does not match double quote for string without it`, async () => { + if (IS_LOKI) return 
- expect(result.length).toEqual(3) - expect(result[0].name).toEqual(`The Mad Wax`) - }) + let result = await runFilter({ name: { regex: `/"/` } }) - it(`sorts results with desc has null fields first`, async () => { - let result = await runQuery({ - limit: 10, - sort: { - fields: [`waxOnly`], - order: [`desc`], - }, - }) + expect(result).toEqual(null) + }) - expect(result.length).toEqual(3) - expect(result[0].id).toEqual(`0`) - expect(result[1].id).toEqual(`2`) - expect(result[2].id).toEqual(`1`) - }) + it(`handles the in operator for strings`, async () => { + let result = await runFilter({ string: { in: [`b`, `c`] } }) - it(`sorts results with asc has null fields last`, async () => { - let result = await runQuery({ - limit: 10, - sort: { - fields: [`waxOnly`], - order: [`asc`], - }, - }) + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(1) + }) - expect(result.length).toEqual(3) - expect(result[0].id).toEqual(`1`) - expect(result[1].id).toEqual(`2`) - expect(result[2].id).toEqual(`0`) - }) + it(`handles the in operator for ints`, async () => { + let result = await runFilter({ index: { in: [0, 2] } }) - it(`applies specified sort order, and sorts asc by default`, async () => { - let result = await runQuery({ - limit: 10, - sort: { - fields: [`frontmatter.blue`, `id`], - order: [`desc`], // `id` field will be sorted asc - }, - }) + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(0) + expect(result[1].index).toEqual(2) + }) + + it(`handles the in operator for floats`, async () => { + let result = await runFilter({ float: { in: [1.5, 2.5] } }) + + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(0) + expect(result[1].index).toEqual(1) + }) + + it(`handles the in operator for just null`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ nil: { in: [null] } }) + + // Do not include the nodes without a `nil` property + expect(result.length).toEqual(2) + result.forEach(edge => { + // May not have the property, or must be null + expect(edge.nil === undefined || edge.nil === null).toBe(true) + }) + }) + + it(`handles the in operator for double null`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ nil: { in: [null, null] } }) + + // Do not include the nodes without a `nil` property + expect(result.length).toEqual(2) + result.forEach(edge => { + // May not have the property, or must be null + expect(edge.nil === undefined || edge.nil === null).toBe(true) + }) + }) + + it(`handles the in operator for null in int and null`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ nil: { in: [5, null] } }) + + // Include the nodes without a `nil` property + expect(result.length).toEqual(2) + result.forEach(edge => { + // May not have the property, or must be null + expect(edge.nil === undefined || edge.nil === null).toBe(true) + }) + }) + + it(`handles the in operator for int in int and null`, async () => { + let result = await runFilter({ index: { in: [2, null] } }) + + // Include the nodes without a `index` property (there aren't any) + expect(result.length).toEqual(1) + result.forEach(edge => { + expect(edge.index === 2).toBe(true) + }) + }) + + it(`handles the in operator for booleans`, async () => { + let result = await runFilter({ boolean: { in: [true] } }) + + expect(result.length).toEqual(1) + expect(result[0].index).toEqual(0) + expect(result[0].boolean).toEqual(true) + }) + + it(`handles the in operator for array with one element`, async () => { + let result = await runFilter({ 
anArray: { in: [5] } }) + + // The first one has a 5, the second one does not have a 5, the third does + // not have the property at all (NULL). It should return the first and last. + // (If the target value has `null` then the third should be omitted) + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`handles the in operator for array some elements`, async () => { + let result = await runFilter({ anArray: { in: [20, 5, 300] } }) + + // Same as the test for just `[5]`. 20 and 300 do not appear anywhere. + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`handles the nested in operator for array of strings`, async () => { + let result = await runFilter({ frontmatter: { tags: { in: [`moo`] } } }) + + expect(result.length).toEqual(1) + expect(result[0].name).toEqual(`The Mad Max`) + }) + + it(`handles the elemMatch operator on a proper single tree`, async () => { + let result = await runFilter({ + singleElem: { + things: { + elemMatch: { + one: { + two: { + three: { eq: 123 }, + }, + }, + }, + }, + }, + }) + + expect(result.length).toEqual(1) + }) + + it(`handles the elemMatch operator on the second element`, async () => { + let result = await runFilter({ + singleElem: { + things: { + elemMatch: { + one: { + five: { + three: { eq: 153 }, + }, + }, + }, + }, + }, + }) + + expect(result.length).toEqual(1) + // Should contain the entire array even if only one element matched + expect(result[0].singleElem.things[0].one.two.three).toEqual(123) + expect(result[0].singleElem.things[1].one.five.three).toEqual(153) + }) + + it(`should return only one node if elemMatch hits multiples`, async () => { + let result = await runFilter({ + singleElem: { + things: { + elemMatch: { + one: { + two: { + three: { lt: 1000 }, // one match is 123, the other 404 + }, + }, + }, + }, + }, + }) + + // The `elemMatch` operator only returns the first node that matches so + // even though the `lt 1000` would match two elements in the `things` array + // it will return one node. 
+ expect(result.length).toEqual(1) + expect(result[0].singleElem.things[0].one.two.three).toEqual(123) + expect(result[0].singleElem.things[2].one.two.three).toEqual(404) + }) + + it(`ignores the elemMatch operator on a partial sub tree`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ + singleElem: { + things: { + elemMatch: { + three: { eq: 123 }, + }, + }, + }, + }) + + expect(result).toEqual(null) + }) + + it(`handles the elemMatch operator for array of objects (1)`, async () => { + let result = await runFilter({ + data: { + tags: { + elemMatch: { + tag: { + document: { + elemMatch: { + data: { + tag: { eq: `Gatsby` }, + }, + }, + }, + }, + }, + }, + }, + }) + + expect(result.length).toEqual(1) + expect(result[0].index).toEqual(2) + }) + + it(`handles the elemMatch operator for array of objects (2)`, async () => { + let result = await runFilter({ + data: { + tags: { + elemMatch: { + tag: { + document: { + elemMatch: { + data: { + tag: { eq: `Design System` }, + }, + }, + }, + }, + }, + }, + }, + }) - expect(result.length).toEqual(3) - expect(result[0].id).toEqual(`1`) // blue = 10010, id = 1 - expect(result[1].id).toEqual(`2`) // blue = 10010, id = 2 - expect(result[2].id).toEqual(`0`) // blue = 100, id = 0 - }) + expect(result.length).toEqual(1) + expect(result[0].index).toEqual(2) + }) - it(`applies specified sort order per field`, async () => { - let result = await runQuery({ - limit: 10, - sort: { - fields: [`frontmatter.blue`, `id`], - order: [`desc`, `desc`], // `id` field will be sorted desc - }, + it(`works for elemMatch on boolean field`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ + boolean: { + elemMatch: { + eq: true, + }, + }, + }) + + // Does NOT contain nodes that do not have the field + expect(result.length).toEqual(1) + expect(result[0].boolean).toEqual(true) + }) + + it(`skips nodes without the field for elemMatch on boolean`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ + boolSecondOnly: { + elemMatch: { + eq: false, + }, + }, + }) + + // Does NOT contain nodes that do not have the field so returns 2nd node + expect(result.length).toEqual(1) + expect(result[0].boolSecondOnly).toEqual(false) + }) + + it(`works for elemMatch on string field`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ + string: { + elemMatch: { + eq: `a`, + }, + }, + }) + + // Does NOT contain nodes that do not have the field + expect(result.length).toEqual(1) + expect(result[0].string).toEqual(`a`) + }) + + it(`should return all nodes for elemMatch on non-arrays too`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ + name: { + elemMatch: { + eq: `The Mad Wax`, + }, + }, + }) + + // Can return more than one node + // Does NOT contain nodes that do not have the field + expect(result.length).toEqual(2) + expect(result[0].name).toEqual(`The Mad Wax`) + expect(result[1].name).toEqual(`The Mad Wax`) + }) + + it(`skips nodes without the field for elemMatch on string`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ + strSecondOnly: { + elemMatch: { + eq: `needle`, + }, + }, + }) + + // Does NOT contain nodes that do not have the field so returns 2nd node + expect(result.length).toEqual(1) + expect(result[0].strSecondOnly).toEqual(`needle`) + }) + + it(`works for elemMatch on number field`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ + float: { + elemMatch: { + eq: 1.5, + }, + }, + }) + + // Does NOT contain nodes that do not have the 
field + expect(result.length).toEqual(1) + expect(result[0].float).toEqual(1.5) + }) + + it(`handles the nin operator for array [5]`, async () => { + let result = await runFilter({ anArray: { nin: [5] } }) + + // Since the array does not contain `null`, the query should also return the + // nodes that do not have the field at all (NULL). + + expect(result.length).toEqual(2) + result.forEach(edge => { + // Either does not exist or does not contain + expect(edge.anArray === undefined || !edge.anArray.includes(5)).toBe( + true + ) + }) + }) + + it(`handles the nin operator for array [null]`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ nullArray: { nin: [null] } }) + + // Since the array contains `null`, the query should NOT return the + // nodes that do not have the field at all (NULL). + + expect(result.length).toEqual(1) + expect(result[0].nullArray.includes(null)).toBe(false) + }) + + it(`handles the nin operator for strings`, async () => { + let result = await runFilter({ string: { nin: [`b`, `c`] } }) + + expect(result.length).toEqual(1) + result.forEach(edge => { + expect(edge.string).not.toEqual(`b`) + expect(edge.string).not.toEqual(`c`) + }) + }) + + it(`handles the nin operator for ints`, async () => { + let result = await runFilter({ index: { nin: [0, 2] } }) + + expect(result.length).toEqual(1) + result.forEach(edge => { + expect(edge.index).not.toEqual(0) + expect(edge.index).not.toEqual(2) + }) + }) + + it(`handles the nin operator for floats`, async () => { + let result = await runFilter({ float: { nin: [1.5] } }) + + expect(result.length).toEqual(2) + result.forEach(edge => { + // Might not have the property (-> undefined), must not be 1.5 + expect(edge.float).not.toEqual(1.5) + }) + }) + + it(`handles the nin operator for booleans`, async () => { + let result = await runFilter({ boolean: { nin: [true, null] } }) + + // Do not return the node that does not have the field because of `null` + expect(result.length).toEqual(1) + result.forEach(edge => { + // Must have the property, must not be true nor null + expect(edge.boolean !== undefined).toBe(true) + expect(edge.boolean !== true && edge.boolean !== null).toBe(true) + }) + }) + + it(`handles the nin operator for double null`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ nil: { nin: [null, null] } }) + + // Do not return the node that does not have the field because of `null` + expect(result.length).toEqual(1) + result.forEach(edge => { + // Must have the property, must not be null + expect(edge.nil !== undefined).toBe(true) + expect(edge.nil !== null).toBe(true) + }) + }) + + it(`handles the nin operator for null in int+null`, async () => { + if (IS_LOKI) return + + let result = await runFilter({ nil: { nin: [5, null] } }) + + // Do not return the node that does not have the field because of `null` + expect(result.length).toEqual(1) + result.forEach(edge => { + // Must have the property, must not be 5 nor null + expect(edge.nil !== undefined).toBe(true) + expect(edge.nil !== 5 && edge.nil !== null).toBe(true) + }) + }) + + it(`handles the nin operator for int in int+null`, async () => { + let result = await runFilter({ index: { nin: [2, null] } }) + + // Do not return the node that does not have the field because of `null` + expect(result.length).toEqual(2) + result.forEach(edge => { + // Must have the property, must not be 2 nor null + expect(edge.index !== undefined).toBe(true) + expect(edge.index !== 2 && edge.index !== null).toBe(true) + }) + }) + + it(`handles the glob 
operator`, async () => { + let result = await runFilter({ name: { glob: `*Wax` } }) + + expect(result.length).toEqual(2) + expect(result[0].name).toEqual(`The Mad Wax`) + }) + + it(`filters date fields`, async () => { + let result = await runFilter({ date: { ne: null } }) + + expect(result.length).toEqual(2) + expect(result[0].index).toEqual(0) + expect(result[1].index).toEqual(2) + }) + + it(`handles the eq operator for array field values`, async () => { + const result = await runFilter({ anArray: { eq: 5 } }) + + expect(result.length).toBe(1) + expect(result[0].index).toBe(1) + }) + + it(`handles the ne operator for array field values`, async () => { + const result = await runFilter({ anArray: { ne: 1 } }) + + expect(result.length).toBe(1) + expect(result[0].index).toBe(2) + }) }) - expect(result.length).toEqual(3) - expect(result[0].id).toEqual(`2`) // blue = 10010, id = 2 - expect(result[1].id).toEqual(`1`) // blue = 10010, id = 1 - expect(result[2].id).toEqual(`0`) // blue = 100, id = 0 + describe(`collection fields`, () => { + it(`orders by given field desc with limit`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`frontmatter.blue`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + describe(`num, null, and nullable order`, () => { + // This suite asserts the order of a field that is a number vs a field that + // is explicitly set to the value `null`, vs a field that is not set + // (which gets NULL in the database). This should do whatever redux does! + // Exhaustive suite; 2^3 x2 = 12 tests, all cases in asc and desc + + // node 1 2 3 + // - num_null_not 1st node has field set, 2nd set to null, 3rd not set + // - num_not_null etc + // - null_num_not + // - null_not_num + // - not_null_num + // - not_num_null + + it(`sorts num_null_not asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`num_null_not`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts num_null_not desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`num_null_not`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts num_not_null asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`num_not_null`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts num_not_null desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`num_not_null`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts null_num_not asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_num_not`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts null_num_not desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_num_not`], + order: [`desc`], + }, + }) + + 
expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts null_not_num asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_not_num`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts null_not_num desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_not_num`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts not_null_num asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_null_num`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts not_null_num desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_null_num`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts not_num_null asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_num_null`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts not_num_null desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_num_null`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + }) + + describe(`string, null, and nullable order`, () => { + // This suite asserts the order of a field that is a string vs a field that + // is explicitly set to the value `null`, vs a field that is not set + // (which gets NULL in the database). This should do whatever redux does! 
+ // Exhaustive suite; 2^3 x2 = 12 tests, all cases in asc and desc + + // node 1 2 3 + // - str_null_not 1st node has field set, 2nd set to null, 3rd not set + // - str_not_null etc + // - null_str_not + // - null_not_str + // - not_null_str + // - not_str_null + + it(`sorts str_null_not asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`str_null_not`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts str_null_not desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`str_null_not`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts str_not_null asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`str_not_null`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts str_not_null desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`str_not_null`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts null_str_not asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_str_not`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts null_str_not desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_str_not`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts null_not_str asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_not_str`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts null_not_str desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_not_str`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts not_null_str asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_null_str`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts not_null_str desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_null_str`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts not_str_null asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_str_null`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts not_str_null desc`, 
async () => { + let result = await runQuery({ + sort: { + fields: [`not_str_null`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + }) + + describe(`obj, null, and nullable order`, () => { + // This suite asserts the order of a field that is a object vs a field that + // is explicitly set to the value `null`, vs a field that is not set + // (which gets NULL in the database). This should do whatever redux does! + // Exhaustive suite; 2^3 x2 = 12 tests, all cases in asc and desc + + // node 1 2 3 + // - obj_null_not 1st node has field set, 2nd set to null, 3rd not set + // - obj_not_null etc + // - null_obj_not + // - null_not_obj + // - not_null_obj + // - not_obj_null + + it(`sorts obj_null_not asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`obj_null_not`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts obj_null_not desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`obj_null_not`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts obj_not_null asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`obj_not_null`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts obj_not_null desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`obj_not_null`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts null_obj_not asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_obj_not`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts null_obj_not desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_obj_not`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts null_not_obj asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_not_obj`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts null_not_obj desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`null_not_obj`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`0`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts not_null_obj asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_null_obj`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts not_null_obj desc`, async () => { + let result = await runQuery({ + 
sort: { + fields: [`not_null_obj`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`1`) + expect(result[2].id).toEqual(`2`) + }) + + it(`sorts not_obj_null asc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_obj_null`], + order: [`asc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`sorts not_obj_null desc`, async () => { + let result = await runQuery({ + sort: { + fields: [`not_obj_null`], + order: [`desc`], + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + }) + + it(`sorts results with desc has null fields first vs obj second`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`waxOnly`], + order: [`desc`], + }, + }) + + // 0 doesnt have it, 1 has it as an object, 2 has it as null + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`0`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`1`) + }) + + it(`sorts results with asc has null fields last vs obj first`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`waxOnly`], + order: [`asc`], + }, + }) + + // 0 doesnt have it, 1 has it as an object, 2 has it as null + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) + expect(result[1].id).toEqual(`2`) + expect(result[2].id).toEqual(`0`) + }) + + it(`applies specified sort order, and sorts asc by default`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`frontmatter.blue`, `id`], + order: [`desc`], // `id` field will be sorted asc + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`1`) // blue = 10010, id = 1 + expect(result[1].id).toEqual(`2`) // blue = 10010, id = 2 + expect(result[2].id).toEqual(`0`) // blue = 100, id = 0 + }) + + it(`applies specified sort order per field`, async () => { + let result = await runQuery({ + limit: 10, + sort: { + fields: [`frontmatter.blue`, `id`], + order: [`desc`, `desc`], // `id` field will be sorted desc + }, + }) + + expect(result.length).toEqual(3) + expect(result[0].id).toEqual(`2`) // blue = 10010, id = 2 + expect(result[1].id).toEqual(`1`) // blue = 10010, id = 1 + expect(result[2].id).toEqual(`0`) // blue = 100, id = 0 + }) + }) }) }) diff --git a/packages/gatsby/src/schema/node-model.js b/packages/gatsby/src/schema/node-model.js index 493f1ac8a3a5b..a7c7fd8853afd 100644 --- a/packages/gatsby/src/schema/node-model.js +++ b/packages/gatsby/src/schema/node-model.js @@ -78,14 +78,14 @@ class LocalNodeModel { * Replace the cache either with the value passed on (mainly for tests) or * an empty new Map. * - * @param {undefined | Map> | Map>} map + * @param {undefined | null | FiltersCache} map * (This cached is used in redux/nodes.js and caches a set of buckets (Sets) * of Nodes based on filter and tracks this for each set of types which are * actually queried. If the filter targets `id` directly, only one Node is - * cached instead of a Set of Nodes. + * cached instead of a Set of Nodes. If null, don't create or use a cache. 
*/ replaceTypeKeyValueCache(map = new Map()) { - this._typedKeyValueIndexes = map // See redux/nodes.js for usage + this._filtersCache = map // See redux/nodes.js for usage } withContext(context) { @@ -237,7 +237,7 @@ class LocalNodeModel { gqlType, resolvedFields: fieldsToResolve, nodeTypeNames, - typedKeyValueIndexes: this._typedKeyValueIndexes, + filtersCache: this._filtersCache, stats, }) diff --git a/packages/gatsby/src/types.ts b/packages/gatsby/src/types.ts index 1835e473fc1dc..66727d89500b5 100644 --- a/packages/gatsby/src/types.ts +++ b/packages/gatsby/src/types.ts @@ -2,7 +2,7 @@ export interface IMatch { id: string context: { sourceMessage: string - [key: string]: string + [key: string]: string | boolean } error?: Error | undefined [key: string]: unknown diff --git a/packages/gatsby/src/utils/webpack.config.js b/packages/gatsby/src/utils/webpack.config.js index a37c90f315bfc..993055a638cda 100644 --- a/packages/gatsby/src/utils/webpack.config.js +++ b/packages/gatsby/src/utils/webpack.config.js @@ -530,10 +530,13 @@ module.exports = async ( reuseExistingChunk: true, }, commons: { + // only bundle non-async modules + chunks: `initial`, name: `commons`, - // if a chunk is used on all components we put it in commons - minChunks: componentsCount, + // if a chunk is used on all components we put it in commons (we need at least 2 components) + minChunks: Math.max(componentsCount, 2), priority: 20, + reuseExistingChunk: true, }, // If a chunk is used in at least 2 components we create a separate chunk shared: { diff --git a/scripts/check-ts.js b/scripts/check-ts.js index 9aca0ca9641d9..68df1d3b4e551 100644 --- a/scripts/check-ts.js +++ b/scripts/check-ts.js @@ -15,7 +15,8 @@ const execa = require(`execa`) console.log(`TS Check: Running...`) -const PACKAGES_DIR = path.resolve(__dirname, `../packages`) +const toAbsolutePath = relativePath => path.join(__dirname, `..`, relativePath) +const PACKAGES_DIR = toAbsolutePath(`/packages`) const filterPackage = yargs.argv._[0] @@ -59,9 +60,34 @@ if (filterPackage) { } } +let totalTsFiles = 0 +let totalJsFiles = 0 + packagesWithTs.forEach(project => { + const tsFiles = glob.sync( + toAbsolutePath( + `./packages/${project.split(/.*packages[/\\]/)[1]}/src/**/*.ts` + ) + ).length + + const jsFiles = glob.sync( + toAbsolutePath( + `./packages/${project.split(/.*packages[/\\]/)[1]}/src/**/*.js` + ) + ).length + + totalTsFiles += tsFiles + totalJsFiles += jsFiles + + const percentConverted = Number( + ((tsFiles / (jsFiles + tsFiles)) * 100).toFixed(1) + ) + console.log( - `TS Check: Checking ./packages/${project.split(/.*packages[/\\]/)[1]}` + `TS Check: Checking ./packages/${project.split(/.*packages[/\\]/)[1]}`, + `\n - TS Files: ${tsFiles}`, + `\n - JS Files: ${jsFiles}`, + `\n - Percent Converted: ${percentConverted}%` ) const args = [ @@ -84,3 +110,15 @@ packagesWithTs.forEach(project => { }) console.log(`TS Check: Success`) + +if (!filterPackage) { + const percentConverted = Number( + ((totalTsFiles / (totalJsFiles + totalTsFiles)) * 100).toFixed(1) + ) + + console.log( + ` - Total TS Files: ${totalTsFiles}`, + `\n - Total JS Files: ${totalJsFiles}`, + `\n - Percent Converted: ${percentConverted}%` + ) +} diff --git a/scripts/e2e-test.sh b/scripts/e2e-test.sh index 1532bb95a2f0c..509f900a73c54 100755 --- a/scripts/e2e-test.sh +++ b/scripts/e2e-test.sh @@ -3,7 +3,7 @@ SRC_PATH=$1 CUSTOM_COMMAND="${2:-yarn test}" GATSBY_PATH="${CIRCLE_WORKING_DIRECTORY:-../../}" -# cypress docker does not support sudo and do not need it but the default node executor 
does +# cypress docker does not support sudo and does not need it, but the default node executor does command -v sudo && sudo npm install -g gatsby-dev-cli || npm install -g gatsby-dev-cli && # setting up child integration test link to gatsby packages diff --git a/scripts/i18n/README.md b/scripts/i18n/README.md index 6b7654ccce566..8211f63214653 100644 --- a/scripts/i18n/README.md +++ b/scripts/i18n/README.md @@ -70,3 +70,13 @@ When run, the script will: - Pulls the latest version of `gatsby-i18n-source`. - Creates a "sync" pull request that updates all files that do not contain conflicts from the merge. - Creates a "conflicts" pull request that contains all merge conflicts, with instructions on how to resolve them. + +### `run-all` + +Usage: + +```shell +yarn run-all [script name] +``` + +The `run-all` script runs the script provided in the argument across all languages for which there are translations of gatsbyjs.org, listed in /www/src/i18n.json. diff --git a/scripts/i18n/package.json b/scripts/i18n/package.json index b8795edc8fef0..1c60f22081e4f 100644 --- a/scripts/i18n/package.json +++ b/scripts/i18n/package.json @@ -4,6 +4,7 @@ "description": "Scripts for gatsby internationalization", "scripts": { "create": "node ./create.js", + "run-all": "node ./run-all.js", "sync": "node ./sync.js", "update-source": "node ./update-source.js" }, diff --git a/scripts/i18n/run-all.js b/scripts/i18n/run-all.js new file mode 100644 index 0000000000000..3cac7e235ac4a --- /dev/null +++ b/scripts/i18n/run-all.js @@ -0,0 +1,21 @@ +// Run the provided script on all valid repos +const fs = require(`fs`) +const log4js = require(`log4js`) +const shell = require(`shelljs`) +let logger = log4js.getLogger(`run-all`) + +require(`dotenv`).config() + +function runAll(script) { + if (!script) { + logger.error(`Usage: yarn run-all [script name]`) + process.exit(1) + } + const langs = JSON.parse(fs.readFileSync(`../../www/i18n.json`)) + for (const { code } of langs) { + shell.exec(`yarn ${script} ${code}`) + } +} + +const [script] = process.argv.slice(2) +runAll(script) diff --git a/scripts/i18n/sync.js b/scripts/i18n/sync.js index fed6e12634fc8..a59690c70fda6 100644 --- a/scripts/i18n/sync.js +++ b/scripts/i18n/sync.js @@ -1,11 +1,12 @@ const log4js = require(`log4js`) const shell = require(`shelljs`) -const { graphql } = require(`@octokit/graphql`) +const { graphql: baseGraphql } = require(`@octokit/graphql`) let logger = log4js.getLogger(`sync`) require(`dotenv`).config() -const host = `https://github.com` +const token = process.env.GITHUB_API_TOKEN +const host = `https://${token}@github.com` const cacheDir = `.cache` const owner = `gatsbyjs` const repoBase = `gatsby` @@ -14,6 +15,7 @@ const sourceRepo = `gatsby-i18n-source` const sourceRepoUrl = `${host}/${owner}/${sourceRepo}` const sourceRepoGitUrl = `${sourceRepoUrl}.git` +const syncLabelName = `sync` // get the git short hash function getShortHash(hash) { @@ -33,25 +35,68 @@ function cloneOrUpdateRepo(repoName, repoUrl) { } } +// Run the query and exit if there are errors +async function graphql(query, params) { + const graphqlWithAuth = baseGraphql.defaults({ + headers: { + authorization: `token ${token}`, + }, + }) + try { + return await graphqlWithAuth(query, params) + } catch (error) { + logger.error(error.message) + return process.exit(1) + } +} + async function getRepository(owner, name) { const { repository } = await graphql( ` - query($owner: String!, $name: String!) { + query($owner: String!, $name: String!, $syncLabel: String!) 
{ repository(owner: $owner, name: $name) { id + syncPullRequests: pullRequests(labels: [$syncLabel], first: 1) { + nodes { + id + } + } + syncLabel: label(name: $syncLabel) { + id + } } } `, { - headers: { - authorization: `token ${process.env.GITHUB_ADMIN_AUTH_TOKEN}`, - }, owner, name, + syncLabel: syncLabelName, } ) return repository } + +async function createLabel(input) { + const { createLabel } = await graphql( + ` + mutation($input: CreateLabelInput!) { + createLabel(input: $input) { + label { + id + } + } + } + `, + { + headers: { + accept: `application/vnd.github.bane-preview+json`, + }, + input, + } + ) + return createLabel.label +} + async function createPullRequest(input) { const { createPullRequest } = await graphql( ` @@ -66,7 +111,6 @@ async function createPullRequest(input) { `, { headers: { - authorization: `token ${process.env.GITHUB_BOT_AUTH_TOKEN}`, accept: `application/vnd.github.shadow-cat-preview+json`, }, input, @@ -75,6 +119,27 @@ async function createPullRequest(input) { return createPullRequest.pullRequest } +async function addLabelToPullRequest(pullRequest, label) { + await graphql( + ` + mutation($input: AddLabelsToLabelableInput!) { + addLabelsToLabelable(input: $input) { + clientMutationId + } + } + `, + { + headers: { + accept: `application/vnd.github.bane-preview+json`, + }, + input: { + labelableId: pullRequest.id, + labelIds: [label.id], + }, + } + ) +} + function conflictPRBody(conflictFiles, comparisonUrl, prNumber) { return ` Sync conflicts with the source repo. Please update the translations based on updated source content. @@ -126,7 +191,32 @@ async function syncTranslationRepo(code) { shell.exec(`git remote add source ${sourceRepoGitUrl}`) shell.exec(`git fetch source master`) - // TODO don't run the sync script if there is a current PR from the bot + const repository = await getRepository(owner, transRepoName) + + if (repository.syncPullRequests.nodes.length > 0) { + logger.info( + `There are currently open sync pull requests. Please ask the language maintainers to merge the existing PR(s) in before opening another one. Exiting...` + ) + process.exit(0) + } + + logger.info(`No currently open sync pull requests.`) + + let syncLabel + if (!repository.syncLabel) { + logger.info( + `Repository does not have a "${syncLabelName}" label. Creating one...` + ) + syncLabel = await createLabel({ + repositoryId: repository.id, + name: syncLabelName, + description: `Sync with translation source. 
Used by @gatsbybot to track open sync pull requests.`, + color: `fbca04`, + }) + } else { + logger.info(`Repository has an existing ${syncLabelName} label.`) + syncLabel = repository.syncLabel + } // TODO exit early if this fails // Compare these changes @@ -152,15 +242,13 @@ async function syncTranslationRepo(code) { // Remove files that are deleted by upstream // https://stackoverflow.com/a/54232519 shell.exec(`git diff --name-only --diff-filter=U | xargs git rm`) - shell.exec(`git ci --no-edit`) + shell.exec(`git commit --no-edit`) shell.exec(`git push -u origin ${syncBranch}`) - const repository = await getRepository(owner, transRepoName) - logger.info(`Creating sync pull request`) // TODO if there is already an existing PR, don't create a new one and exit early - const { number: syncPRNumber } = await createPullRequest({ + const syncPR = await createPullRequest({ repositoryId: repository.id, baseRefName: `master`, headRefName: syncBranch, @@ -168,6 +256,7 @@ async function syncTranslationRepo(code) { body: syncPRBody(), maintainerCanModify: true, }) + await addLabelToPullRequest(syncPR, syncLabel) // if we successfully publish the PR, pull again and create a new PR -- shell.exec(`git checkout master`) @@ -235,15 +324,16 @@ async function syncTranslationRepo(code) { logger.info(`Creating conflicts pull request`) // TODO assign codeowners as reviewers - await createPullRequest({ + const conflictsPR = await createPullRequest({ repositoryId: repository.id, baseRefName: `master`, headRefName: conflictBranch, title: `(sync) Resolve conflicts with ${sourceRepo} @ ${shortHash}`, - body: conflictPRBody(conflictFiles, comparisonUrl, syncPRNumber), + body: conflictPRBody(conflictFiles, comparisonUrl, syncPR.number), maintainerCanModify: true, draft: true, }) + await addLabelToPullRequest(conflictsPR, syncLabel) } const [langCode] = process.argv.slice(2) diff --git a/starters/hello-world/gatsby-config.js b/starters/hello-world/gatsby-config.js index 823925db4c737..4172a129ff405 100644 --- a/starters/hello-world/gatsby-config.js +++ b/starters/hello-world/gatsby-config.js @@ -6,4 +6,5 @@ module.exports = { /* Your site config here */ + plugins: [] } diff --git a/www/gatsby-config.js b/www/gatsby-config.js index 4bed679173af9..2e4add583a1fe 100644 --- a/www/gatsby-config.js +++ b/www/gatsby-config.js @@ -2,7 +2,7 @@ const path = require(`path`) require(`dotenv`).config({ path: `.env.${process.env.NODE_ENV}`, }) -const { langCodes } = require(`./src/utils/i18n`) +const { i18nEnabled, langCodes } = require(`./src/utils/i18n`) const GA = { identifier: `UA-93349937-5`, @@ -53,12 +53,8 @@ if (process.env.AIRTABLE_API_KEY) { }) } -// true if `env.LOCALES` has a defined list of languages -if (langCodes.length > 0) { - const naughtyFiles = [ - `docs/docs/graphql-api.md`, - `docs/docs/data-fetching.md`, - ] +if (i18nEnabled) { + const naughtyFiles = [`docs/docs/data-fetching.md`] dynamicPlugins.push( ...langCodes.map(code => ({ resolve: `gatsby-source-git`, diff --git a/www/gatsby-node.js b/www/gatsby-node.js index 84f4ee66c88a5..643f31fd51806 100644 --- a/www/gatsby-node.js +++ b/www/gatsby-node.js @@ -5,6 +5,7 @@ const child_process = require(`child_process`) const startersRedirects = require(`./starter-redirects.json`) const yaml = require(`js-yaml`) const redirects = yaml.load(fs.readFileSync(`./redirects.yaml`)) +const { i18nEnabled } = require(`./src/utils/i18n`) const docs = require(`./src/utils/node/docs.js`) const showcase = require(`./src/utils/node/showcase.js`) @@ -41,7 +42,7 @@ 
exports.onCreateNode = helpers => { exports.onPostBootstrap = () => { // Compile language strings if locales are enabled - if (!!process.env.LOCALES) { + if (i18nEnabled) { child_process.execSync(`yarn lingui:build`) } } diff --git a/www/src/components/I18nContext.js b/www/src/components/I18nContext.js index 8ba94e01e938b..67a21224916e4 100644 --- a/www/src/components/I18nContext.js +++ b/www/src/components/I18nContext.js @@ -1,5 +1,5 @@ import React from "react" -import { defaultLang } from "../utils/i18n" +import { i18nEnabled, defaultLang } from "../utils/i18n" import { I18nProvider as LinguiProvider } from "@lingui/react" // Lingui doesn't give access to the locale, so we need our own provider @@ -7,7 +7,7 @@ import { I18nProvider as LinguiProvider } from "@lingui/react" const LocaleContext = React.createContext(defaultLang) export function I18nProvider({ locale = defaultLang, children }) { - const catalog = !!process.env.LOCALES + const catalog = i18nEnabled ? require(`../data/locales/${locale}/messages.js`) : {} return ( diff --git a/www/src/components/package-readme.js b/www/src/components/package-readme.js index 569d2fd420b79..34a984ea6b255 100644 --- a/www/src/components/package-readme.js +++ b/www/src/components/package-readme.js @@ -10,10 +10,51 @@ import MarkdownPageFooter from "./markdown-page-footer" import FooterLinks from "./shared/footer-links" import { GoMarkGithub as GithubIcon } from "react-icons/go" import GatsbyIcon from "./gatsby-monogram" +import { FaUsers as CommunityIcon } from "react-icons/fa" + +const GatsbyPluginBadge = ({ isOfficial }) => { + const Icon = isOfficial ? GatsbyIcon : CommunityIcon + const title = isOfficial + ? "Official Gatsby Plugin" + : "Community Gatsby Plugin" + const text = isOfficial ? `Official Plugin` : `Community Plugin` + + return ( +
+ + + + {text} +
+ ) +} const PackageReadMe = props => { const { page, packageName, excerpt, html, githubUrl, timeToRead } = props const metaExcerpt = excerpt || `Plugin information for ${packageName}` + const isOfficial = + githubUrl.indexOf(`https://github.com/gatsbyjs/gatsby`) === 0 && + packageName[0] !== `@` return ( @@ -45,35 +86,7 @@ const PackageReadMe = props => { justifyContent: `space-between`, }} > - {githubUrl.indexOf(`https://github.com/gatsbyjs/gatsby`) === 0 && - packageName[0] !== `@` && ( -
- - - - Official Plugin -
- )} + { }} > {hit.repository && - hit.name[0] !== `@` && - hit.repository.url.indexOf(`https://github.com/gatsbyjs/gatsby`) === - 0 && ( - - - - )} + hit.name[0] !== `@` && + hit.repository.url.indexOf(`https://github.com/gatsbyjs/gatsby`) === + 0 ? ( + + + + ) : ( + + + + )} lang.code) +const i18nEnabled = langs.length > 0 + function isDefaultLang(locale) { return locale === defaultLang } @@ -78,6 +80,7 @@ function getLocaleAndBasePath(path, codes = langCodes) { } module.exports = { + i18nEnabled, langCodes, langs, defaultLang, diff --git a/yarn.lock b/yarn.lock index 219b0871cd64c..38bc2bbb027e9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3891,6 +3891,11 @@ resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0" integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ== +"@types/common-tags@^1.8.0": + version "1.8.0" + resolved "https://registry.yarnpkg.com/@types/common-tags/-/common-tags-1.8.0.tgz#79d55e748d730b997be5b7fce4b74488d8b26a6b" + integrity sha512-htRqZr5qn8EzMelhX/Xmx142z218lLyGaeZ3YR8jlze4TATRU9huKKvuBmAJEW4LCC4pnY1N6JAm6p85fMHjhg== + "@types/configstore@^2.1.1": version "2.1.1" resolved "https://registry.yarnpkg.com/@types/configstore/-/configstore-2.1.1.tgz#cd1e8553633ad3185c3f2f239ecff5d2643e92b6" @@ -4170,6 +4175,13 @@ "@types/glob" "*" "@types/node" "*" +"@types/semver@^7.1.0": + version "7.1.0" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.1.0.tgz#c8c630d4c18cd326beff77404887596f96408408" + integrity sha512-pOKLaubrAEMUItGNpgwl0HMFPrSAFic8oSVIvfu1UwcgGNmNyK9gyhBHKmBnUTwwVvpZfkzUC0GaMgnL6P86uA== + dependencies: + "@types/node" "*" + "@types/serve-static@*": version "1.13.3" resolved "https://registry.yarnpkg.com/@types/serve-static/-/serve-static-1.13.3.tgz#eb7e1c41c4468272557e897e9171ded5e2ded9d1" @@ -4178,6 +4190,11 @@ "@types/express-serve-static-core" "*" "@types/mime" "*" +"@types/signal-exit@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/signal-exit/-/signal-exit-3.0.0.tgz#75e3b17660cf1f6c6cb8557675b4e680e43bbf36" + integrity sha512-MaJ+16SOXz0Z27EMf3d88+B6UDglq1sn140a+5X/ROLkIcEfRq0CPg+1B2efF1GXQn4n+aKH4ti2hHG4Ya+Dzg== + "@types/socket.io@^2.1.4": version "2.1.4" resolved "https://registry.yarnpkg.com/@types/socket.io/-/socket.io-2.1.4.tgz#674e7bc193c5ccdadd4433f79f3660d31759e9ac"