From 6dd12703f4eb333028575615d055788cedb9d195 Mon Sep 17 00:00:00 2001 From: Wojciech Maj Date: Mon, 19 Feb 2024 20:51:31 +0000 Subject: [PATCH] In Node.js v10.12.0, `recursive` option was added to [`fs.mkdirSync`](https://nodejs.org/api/fs.html#fsmkdirsyncpath-options). This option allows you to create a directory and all its parent directories if they do not exist. In Node.js v14.14.0, [`fs.rmSync`](https://nodejs.org/api/fs.html#fsrmsyncpath-options) was added. It's a built-in module that removes files and directories. The `recursive` option is used to remove directories and their contents. Since this repository requires Node.js 18 and up, it's safe to assume `mkdirp` and `rimraf` can be safely replaced, thus reducing the number of dependencies in this project. --- package-lock.json | 33 ------------------- package.json | 2 -- src/audit-logs/scripts/sync.js | 5 ++- .../lib/update-markdown.js | 10 +++--- .../tests/update-markdown.js | 5 ++- src/codeql-cli/scripts/sync.js | 10 +++--- .../scripts/symlink-from-local-repo.js | 3 +- src/frame/tests/get-remote-json.js | 3 +- src/ghes-releases/scripts/archive-version.js | 3 +- .../scripts/remove-static-files.js | 5 ++- .../scripts/sync-automated-pipeline-data.js | 12 +++---- src/github-apps/scripts/sync.js | 5 ++- src/graphql/scripts/sync.js | 3 +- src/rest/scripts/update-files.js | 18 +++++----- src/rest/scripts/utils/sync.js | 5 ++- src/tests/scripts/copy-fixture-data.js | 3 +- src/webhooks/scripts/sync.js | 5 ++- 17 files changed, 38 insertions(+), 92 deletions(-) diff --git a/package-lock.json b/package-lock.json index 7fb737c7c111..91129769c06c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -142,14 +142,12 @@ "markdownlint-rule-search-replace": "^1.2.0", "mdast-util-gfm-table": "^2.0.0", "micromark-extension-gfm-table": "^2.0.0", - "mkdirp": "^3.0.0", "mockdate": "^3.0.5", "nock": "^13.5.0", "nodemon": "3.0.3", "npm-merge-driver-install": "^3.0.0", "nth-check": "2.1.1", "prettier": "^3.2.4", - "rimraf": 
"^5.0.0", "robots-parser": "^3.0.0", "sass": "^1.52.3", "start-server-and-test": "^2.0.3", @@ -11778,20 +11776,6 @@ "node": ">=8" } }, - "node_modules/mkdirp": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/mockdate": { "version": "3.0.5", "dev": true, @@ -14058,23 +14042,6 @@ "integrity": "sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==", "dev": true }, - "node_modules/rimraf": { - "version": "5.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^10.0.0" - }, - "bin": { - "rimraf": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/robots-parser": { "version": "3.0.0", "dev": true, diff --git a/package.json b/package.json index 7664fb2d09ab..371a999e34f0 100644 --- a/package.json +++ b/package.json @@ -363,14 +363,12 @@ "markdownlint-rule-search-replace": "^1.2.0", "mdast-util-gfm-table": "^2.0.0", "micromark-extension-gfm-table": "^2.0.0", - "mkdirp": "^3.0.0", "mockdate": "^3.0.5", "nock": "^13.5.0", "nodemon": "3.0.3", "npm-merge-driver-install": "^3.0.0", "nth-check": "2.1.1", "prettier": "^3.2.4", - "rimraf": "^5.0.0", "robots-parser": "^3.0.0", "sass": "^1.52.3", "start-server-and-test": "^2.0.3", diff --git a/src/audit-logs/scripts/sync.js b/src/audit-logs/scripts/sync.js index 54b842b27695..8187efbde053 100755 --- a/src/audit-logs/scripts/sync.js +++ b/src/audit-logs/scripts/sync.js @@ -10,8 +10,7 @@ * per page. 
*/ import { existsSync } from 'fs' -import { readFile, writeFile } from 'fs/promises' -import { mkdirp } from 'mkdirp' +import { mkdir, readFile, writeFile } from 'fs/promises' import path from 'path' import { getContents, getCommitSha } from '#src/workflows/git-utils.js' @@ -184,7 +183,7 @@ async function main() { const auditLogVersionDirPath = path.join(AUDIT_LOG_DATA_DIR, version) if (!existsSync(auditLogVersionDirPath)) { - await mkdirp(auditLogVersionDirPath) + await mkdir(auditLogVersionDirPath, { recursive: true }) } auditLogTypes.forEach(async (type) => { diff --git a/src/automated-pipelines/lib/update-markdown.js b/src/automated-pipelines/lib/update-markdown.js index d5681194e0d8..9de08ab2ed5e 100644 --- a/src/automated-pipelines/lib/update-markdown.js +++ b/src/automated-pipelines/lib/update-markdown.js @@ -1,10 +1,8 @@ import walk from 'walk-sync' -import { existsSync, lstatSync, unlinkSync } from 'fs' +import { existsSync, lstatSync, unlinkSync, rmSync } from 'fs' import path from 'path' -import { readFile, writeFile, readdir } from 'fs/promises' +import { mkdir, readFile, writeFile, readdir } from 'fs/promises' import matter from 'gray-matter' -import { rimraf } from 'rimraf' -import { mkdirp } from 'mkdirp' import { difference, isEqual } from 'lodash-es' import { allVersions } from '#src/versions/lib/all-versions.js' @@ -139,7 +137,7 @@ async function updateDirectory( const initialDirectoryListing = await getDirectoryInfo(directory) // If there are no children on disk, remove the directory if (initialDirectoryListing.directoryContents.length === 0 && !rootDirectoryOnly) { - rimraf(directory) + rmSync(directory, { recursive: true, force: true }) return } @@ -430,7 +428,7 @@ function isRootIndexFile(indexFile) { // Creates a new directory if it doesn't exist async function createDirectory(targetDirectory) { if (!existsSync(targetDirectory)) { - await mkdirp(targetDirectory) + await mkdir(targetDirectory, { recursive: true }) } } diff --git 
a/src/automated-pipelines/tests/update-markdown.js b/src/automated-pipelines/tests/update-markdown.js index 213fbd4bfbd5..529361454786 100644 --- a/src/automated-pipelines/tests/update-markdown.js +++ b/src/automated-pipelines/tests/update-markdown.js @@ -1,8 +1,7 @@ import { expect } from '@jest/globals' import { tmpdir } from 'os' -import { mkdirp } from 'mkdirp' import { cp, rm, readFile } from 'fs/promises' -import { existsSync } from 'fs' +import { existsSync, mkdirSync } from 'fs' import path from 'path' import matter from 'gray-matter' @@ -56,7 +55,7 @@ describe('automated content directory updates', () => { // structure and contents after running updateContentDirectory. beforeAll(async () => { process.env.TEST_OS_ROOT_DIR = tempDirectory - mkdirp.sync(`${tempContentDirectory}`) + mkdirSync(`${tempContentDirectory}`, { recursive: true }) await cp('src/automated-pipelines/tests/fixtures/content', tempContentDirectory, { recursive: true, }) diff --git a/src/codeql-cli/scripts/sync.js b/src/codeql-cli/scripts/sync.js index 5443037a1281..0749b2693840 100755 --- a/src/codeql-cli/scripts/sync.js +++ b/src/codeql-cli/scripts/sync.js @@ -1,13 +1,11 @@ #!/usr/bin/env node -import { readFile, writeFile, copyFile } from 'fs/promises' -import { existsSync } from 'fs' +import { mkdir, readFile, writeFile, copyFile } from 'fs/promises' +import { existsSync, rmSync } from 'fs' import walk from 'walk-sync' -import { mkdirp } from 'mkdirp' import { execFileSync, execSync } from 'child_process' import path from 'path' import matter from 'gray-matter' -import { rimraf } from 'rimraf' import { updateContentDirectory } from '../../automated-pipelines/lib/update-markdown.js' import { convertContentToDocs } from './convert-markdown-for-docs.js' @@ -74,8 +72,8 @@ async function setupEnvironment() { } // refresh the temp directory - rimraf.sync(TEMP_DIRECTORY) - await mkdirp(TEMP_DIRECTORY) + rmSync(TEMP_DIRECTORY, { recursive: true, force: true }) + await mkdir(TEMP_DIRECTORY, { 
recursive: true }) } // copy the raw rst files to the temp directory and convert them diff --git a/src/early-access/scripts/symlink-from-local-repo.js b/src/early-access/scripts/symlink-from-local-repo.js index df8c428aebc2..b7a1754b8930 100755 --- a/src/early-access/scripts/symlink-from-local-repo.js +++ b/src/early-access/scripts/symlink-from-local-repo.js @@ -8,7 +8,6 @@ // // [end-readme] -import { rimraf } from 'rimraf' import fs from 'fs' import path from 'path' import { program } from 'commander' @@ -69,7 +68,7 @@ const destinationDirsMap = destinationDirNames.reduce((map, dirName) => { // Remove all existing early access directories from this repo destinationDirNames.forEach((dirName) => { const destDir = destinationDirsMap[dirName] - rimraf.sync(destDir) + fs.rmSync(destDir, { recursive: true, force: true }) console.log(`- Removed symlink for early access directory '${dirName}' from this repo`) }) diff --git a/src/frame/tests/get-remote-json.js b/src/frame/tests/get-remote-json.js index 731dfaaad50e..10cbe8e754c1 100644 --- a/src/frame/tests/get-remote-json.js +++ b/src/frame/tests/get-remote-json.js @@ -2,7 +2,6 @@ import fs from 'fs' import path from 'path' import os from 'os' -import { rimraf } from 'rimraf' import { expect, test, describe, beforeAll, afterAll } from '@jest/globals' import nock from 'nock' import getRemoteJSON, { cache } from '#src/frame/lib/get-remote-json.js' @@ -23,7 +22,7 @@ describe('getRemoteJSON', () => { afterAll(() => { process.env.GET_REMOTE_JSON_DISK_CACHE_ROOT = envVarValueBefore - rimraf.sync(tempDir) + fs.rmSync(tempDir, { recursive: true, force: true }) }) afterEach(() => { diff --git a/src/ghes-releases/scripts/archive-version.js b/src/ghes-releases/scripts/archive-version.js index 5864f1fe4fcd..3cbf21360e68 100755 --- a/src/ghes-releases/scripts/archive-version.js +++ b/src/ghes-releases/scripts/archive-version.js @@ -12,7 +12,6 @@ import path from 'path' import fs from 'fs' import scrape from 'website-scraper' import { 
program } from 'commander' -import { rimraf } from 'rimraf' import http from 'http' import createApp from '#src/frame/lib/app.js' @@ -149,7 +148,7 @@ async function main() { } // remove temp directory - rimraf.sync(tmpArchivalDirectory) + fs.rmSync(tmpArchivalDirectory, { recursive: true, force: true }) const app = createApp() const server = http.createServer(app) diff --git a/src/ghes-releases/scripts/remove-static-files.js b/src/ghes-releases/scripts/remove-static-files.js index 8daa0e4c1931..fdb5ab1a7f44 100755 --- a/src/ghes-releases/scripts/remove-static-files.js +++ b/src/ghes-releases/scripts/remove-static-files.js @@ -9,7 +9,6 @@ import fs from 'fs' import path from 'path' -import { rimraf } from 'rimraf' import walk from 'walk-sync' import { allVersions } from '#src/versions/lib/all-versions.js' @@ -33,7 +32,7 @@ walk(ghesReleaseNotesDir) // Check if the directory name contains a deprecated GHES version .filter((dir) => deprecatedVersionsHyphenated.some((version) => dir.includes(version))) // Remove the directory - .map((dir) => rimraf.sync(path.join(ghesReleaseNotesDir, dir))) + .map((dir) => fs.rmSync(path.join(ghesReleaseNotesDir, dir), { recursive: true, force: true })) // webhooks and GraphQL const supportedMiscVersions = supportedEnterpriseVersions.map((v) => v.miscVersionName) @@ -60,6 +59,6 @@ function removeFiles(dir, baseName, supportedVersions) { .forEach((file) => { const fullPath = path.join(dir, file) console.log(`removing ${fullPath}`) - rimraf.sync(fullPath) + fs.rmSync(fullPath, { recursive: true, force: true }) }) } diff --git a/src/ghes-releases/scripts/sync-automated-pipeline-data.js b/src/ghes-releases/scripts/sync-automated-pipeline-data.js index 8510d23954f9..75219b6e47e5 100755 --- a/src/ghes-releases/scripts/sync-automated-pipeline-data.js +++ b/src/ghes-releases/scripts/sync-automated-pipeline-data.js @@ -11,11 +11,9 @@ // // [end-readme] -import { existsSync } from 'fs' -import { readFile, readdir, writeFile, cp } from 
'fs/promises' -import { rimrafSync } from 'rimraf' +import { existsSync, rmSync } from 'fs' +import { mkdir, readFile, readdir, writeFile, cp } from 'fs/promises' import { difference, intersection } from 'lodash-es' -import { mkdirp } from 'mkdirp' import { deprecated, supported } from '#src/versions/lib/enterprise-server-releases.js' @@ -83,7 +81,7 @@ for (const pipeline of pipelines) { const removeFiles = difference(existingDataDir, expectedDirectory) for (const directory of removeFiles) { console.log(`Removing src/${pipeline}/data/${directory}`) - rimrafSync(`src/${pipeline}/data/${directory}`) + rmSync(`src/${pipeline}/data/${directory}`, { recursive: true, force: true }) } // Get a list of data directories to create (release) and create them @@ -134,11 +132,11 @@ const addRelNoteDirs = difference(supportedHyphenated, ghesReleaseNotesDirs) const removeRelNoteDirs = intersection(deprecatedHyphenated, ghesReleaseNotesDirs) for (const directory of removeRelNoteDirs) { console.log(`Removing data/release-notes/enterprise-server/${directory}`) - rimrafSync(`data/release-notes/enterprise-server/${directory}`) + rmSync(`data/release-notes/enterprise-server/${directory}`, { recursive: true, force: true }) } for (const directory of addRelNoteDirs) { console.log(`Create new directory data/release-notes/enterprise-server/${directory}`) - await mkdirp(`data/release-notes/enterprise-server/${directory}`) + await mkdir(`data/release-notes/enterprise-server/${directory}`, { recursive: true }) await cp( `data/release-notes/PLACEHOLDER-TEMPLATE.yml`, `data/release-notes/enterprise-server/${directory}/PLACEHOLDER.yml`, diff --git a/src/github-apps/scripts/sync.js b/src/github-apps/scripts/sync.js index f12b5a3ef0ff..5bcb56a245f5 100644 --- a/src/github-apps/scripts/sync.js +++ b/src/github-apps/scripts/sync.js @@ -1,8 +1,7 @@ #!/usr/bin/env node import { existsSync } from 'fs' -import { mkdirp } from 'mkdirp' -import { readFile, writeFile } from 'fs/promises' +import { mkdir, 
readFile, writeFile } from 'fs/promises' import path from 'path' import { slug } from 'github-slugger' import yaml from 'js-yaml' @@ -130,7 +129,7 @@ export async function syncGitHubAppsData(openApiSource, sourceSchemas, progAcces // When a new version is added, we need to create the directory for it if (!existsSync(targetDirectory)) { - await mkdirp(targetDirectory) + await mkdir(targetDirectory, { recursive: true }) } for (const pageType of Object.keys(githubAppsData)) { diff --git a/src/graphql/scripts/sync.js b/src/graphql/scripts/sync.js index 2a3668743931..fa89ad7570ae 100755 --- a/src/graphql/scripts/sync.js +++ b/src/graphql/scripts/sync.js @@ -1,7 +1,6 @@ #!/usr/bin/env node import fs from 'fs/promises' import path from 'path' -import { mkdirp } from 'mkdirp' import yaml from 'js-yaml' import { execSync } from 'child_process' import { getContents, hasMatchingRef } from '#src/workflows/git-utils.js' @@ -163,7 +162,7 @@ function getVersionType(graphqlVersion) { async function updateFile(filepath, content) { console.log(`Updating file ${filepath}`) - await mkdirp(path.dirname(filepath)) + await fs.mkdir(path.dirname(filepath), { recursive: true }) return fs.writeFile(filepath, content, 'utf8') } diff --git a/src/rest/scripts/update-files.js b/src/rest/scripts/update-files.js index 914a05627342..bf1c6e1ba1a0 100755 --- a/src/rest/scripts/update-files.js +++ b/src/rest/scripts/update-files.js @@ -7,15 +7,13 @@ // // [end-readme] -import { readdir, copyFile, readFile, writeFile, rename } from 'fs/promises' +import { mkdir, readdir, copyFile, readFile, writeFile, rename } from 'fs/promises' import path from 'path' import { program, Option } from 'commander' import { execSync } from 'child_process' -import { rimraf } from 'rimraf' -import { mkdirp } from 'mkdirp' import { fileURLToPath } from 'url' import walk from 'walk-sync' -import { existsSync } from 'fs' +import { existsSync, rmSync } from 'fs' import { syncRestData, getOpenApiSchemaFiles } from 
'./utils/sync.js' import { validateVersionsOptions } from './utils/get-openapi-schemas.js' @@ -72,8 +70,8 @@ main() async function main() { const pipelines = Array.isArray(output) ? output : [output] await validateInputParameters() - rimraf.sync(TEMP_OPENAPI_DIR) - await mkdirp(TEMP_OPENAPI_DIR) + rmSync(TEMP_OPENAPI_DIR, { recursive: true, force: true }) + await mkdir(TEMP_OPENAPI_DIR, { recursive: true }) // If the source repo is github, this is the local development workflow // and the files in github must be bundled and dereferenced first. @@ -97,7 +95,7 @@ async function main() { await copyFile(file, path.join(TEMP_OPENAPI_DIR, baseName)) } - rimraf.sync(TEMP_BUNDLED_OPENAPI_DIR) + rmSync(TEMP_BUNDLED_OPENAPI_DIR, { recursive: true, force: true }) await normalizeDataVersionNames(TEMP_OPENAPI_DIR) // The REST_API_DESCRIPTION_ROOT repo contains all current and @@ -118,7 +116,7 @@ async function main() { derefDir.forEach((schema) => { // if the schema does not start with a current version name, delete it if (!currentOpenApiVersions.find((version) => schema.startsWith(version))) { - rimraf.sync(path.join(TEMP_OPENAPI_DIR, schema)) + rmSync(path.join(TEMP_OPENAPI_DIR, schema), { recursive: true, force: true }) } }) } @@ -183,8 +181,8 @@ async function getBundledFiles() { } // Create a tmp directory to store schema files generated from github/github - rimraf.sync(TEMP_OPENAPI_DIR) - await mkdirp(TEMP_BUNDLED_OPENAPI_DIR) + rmSync(TEMP_OPENAPI_DIR, { recursive: true, force: true }) + await mkdir(TEMP_BUNDLED_OPENAPI_DIR, { recursive: true }) console.log( `\nπŸƒβ€β™€οΈπŸƒπŸƒβ€β™€οΈRunning \`bin/openapi bundle\` in branch '${githubBranch}' of your github/github checkout to generate the dereferenced OpenAPI schema files.\n`, diff --git a/src/rest/scripts/utils/sync.js b/src/rest/scripts/utils/sync.js index 2dba7edbf57b..929c18b9b081 100644 --- a/src/rest/scripts/utils/sync.js +++ b/src/rest/scripts/utils/sync.js @@ -1,7 +1,6 @@ -import { readFile, writeFile } 
from 'fs/promises' +import { mkdir, readFile, writeFile } from 'fs/promises' import { existsSync } from 'fs' import path from 'path' -import { mkdirp } from 'mkdirp' import { updateRestFiles } from './update-markdown.js' import { allVersions } from '#src/versions/lib/all-versions.js' @@ -51,7 +50,7 @@ export async function syncRestData(sourceDirectory, restSchemas, progAccessSourc ) } if (!existsSync(targetDirectoryPath)) { - await mkdirp(targetDirectoryPath) + await mkdir(targetDirectoryPath, { recursive: true }) } const targetPath = path.join(targetDirectoryPath, REST_SCHEMA_FILENAME) await writeFile(targetPath, JSON.stringify(formattedOperations, null, 2)) diff --git a/src/tests/scripts/copy-fixture-data.js b/src/tests/scripts/copy-fixture-data.js index 0082366af4e8..5056c4407b0e 100755 --- a/src/tests/scripts/copy-fixture-data.js +++ b/src/tests/scripts/copy-fixture-data.js @@ -17,7 +17,6 @@ import path from 'path' import { program } from 'commander' import chalk from 'chalk' -import { mkdirp } from 'mkdirp' // Here, write down all the files that are actually part of the rendering // functionality yet live in data. 
@@ -77,7 +76,7 @@ async function main(opts) { if (error.code !== 'ENOENT') throw error } if (!opts.dryRun) { - await mkdirp(path.dirname(destination)) + fs.mkdirSync(path.dirname(destination), { recursive: true }) fs.writeFileSync(destination, source, 'utf-8') if (opts.verbose) { console.log(`Copied latest ${chalk.green(file)} to ${chalk.bold(destination)} πŸ‘πŸΌ`) diff --git a/src/webhooks/scripts/sync.js b/src/webhooks/scripts/sync.js index c88e59ce353b..deb07f7f218c 100644 --- a/src/webhooks/scripts/sync.js +++ b/src/webhooks/scripts/sync.js @@ -1,7 +1,6 @@ -import { readFile, writeFile } from 'fs/promises' +import { mkdir, readFile, writeFile } from 'fs/promises' import { existsSync } from 'fs' import path from 'path' -import { mkdirp } from 'mkdirp' import { WEBHOOK_DATA_DIR, WEBHOOK_SCHEMA_FILENAME } from '../lib/index.js' import Webhook from './webhook.js' @@ -37,7 +36,7 @@ export async function syncWebhookData(sourceDirectory, webhookSchemas) { const targetDirectory = path.join(WEBHOOK_DATA_DIR, versionName) if (!existsSync(targetDirectory)) { - await mkdirp(targetDirectory) + await mkdir(targetDirectory, { recursive: true }) } const targetPath = path.join(targetDirectory, WEBHOOK_SCHEMA_FILENAME)