From 75720445a30c4dcc8add3642fa02a19ea587d8d7 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 17 May 2022 14:09:39 +0200 Subject: [PATCH] feat: api metrics --- .env.tpl | 1 + .github/workflows/cron-metrics.yml | 39 +++++++++ .github/workflows/cron.yml | 26 ++++++ packages/api/db/reset.sql | 3 +- packages/api/db/tables.sql | 9 +++ packages/api/src/constants.js | 2 + packages/api/src/index.js | 10 ++- packages/api/src/metrics.js | 50 ++++++++++++ packages/api/src/utils/db-client.js | 20 +++++ packages/api/test/metrics.spec.js | 25 ++++++ packages/cron/README.md | 26 ++++++ packages/cron/package.json | 23 ++++++ packages/cron/src/bin/metrics.js | 20 +++++ packages/cron/src/jobs/metrics.js | 119 ++++++++++++++++++++++++++++ packages/cron/src/lib/env.js | 16 ++++ packages/cron/src/lib/utils.js | 40 ++++++++++ pnpm-lock.yaml | 42 +++++----- 17 files changed, 450 insertions(+), 21 deletions(-) create mode 100644 .github/workflows/cron-metrics.yml create mode 100644 .github/workflows/cron.yml create mode 100644 packages/api/src/metrics.js create mode 100644 packages/api/test/metrics.spec.js create mode 100644 packages/cron/README.md create mode 100644 packages/cron/package.json create mode 100644 packages/cron/src/bin/metrics.js create mode 100644 packages/cron/src/jobs/metrics.js create mode 100644 packages/cron/src/lib/env.js create mode 100644 packages/cron/src/lib/utils.js diff --git a/.env.tpl b/.env.tpl index 29486fb6..920521d7 100644 --- a/.env.tpl +++ b/.env.tpl @@ -18,3 +18,4 @@ DATABASE_TOKEN=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzdXBhYmFzZSIsImlh # Postgres Database DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/postgres +RO_DATABASE_CONNECTION=postgresql://postgres:postgres@localhost:5432/postgres diff --git a/.github/workflows/cron-metrics.yml b/.github/workflows/cron-metrics.yml new file mode 100644 index 00000000..7859b609 --- /dev/null +++ b/.github/workflows/cron-metrics.yml @@ -0,0 +1,39 @@ +name: Cron Metrics + +on: + 
schedule: + - cron: '*/20 * * * *' + workflow_dispatch: + +jobs: + update: + name: Calculate metrics + runs-on: ubuntu-latest + strategy: + matrix: + env: ['staging', 'production'] + timeout-minutes: 20 + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Checkout latest cron release tag + run: | + LATEST_TAG=$(git describe --tags --abbrev=0 --match='cron-*') + git checkout $LATEST_TAG + - uses: pnpm/action-setup@v2.0.1 + with: + version: 6.32.x + - uses: actions/setup-node@v2 + with: + node-version: 16 + - run: pnpm install + - name: Run job + env: + DEBUG: '*' + ENV: ${{ matrix.env }} + STAGING_DATABASE_CONNECTION: ${{ secrets.STAGING_DATABASE_CONNECTION }} + STAGING_RO_DATABASE_CONNECTION: ${{ secrets.STAGING_DATABASE_CONNECTION }} # no replica for staging + PROD_DATABASE_CONNECTION: ${{ secrets.PROD_DATABASE_CONNECTION }} + PROD_RO_DATABASE_CONNECTION: ${{ secrets.PROD_RO_DATABASE_CONNECTION }} + run: pnpm --filter cron start diff --git a/.github/workflows/cron.yml b/.github/workflows/cron.yml new file mode 100644 index 00000000..a115af66 --- /dev/null +++ b/.github/workflows/cron.yml @@ -0,0 +1,26 @@ +name: cron +on: + push: + branches: + - main + paths: + - 'packages/cron/**' + - '.github/workflows/cron.yml' + pull_request: + paths: + - 'packages/cron/**' + - '.github/workflows/cron.yml' +release: + name: Release + runs-on: ubuntu-latest + needs: test + if: github.event_name == 'push' && github.ref == 'refs/heads/main' + steps: + - uses: GoogleCloudPlatform/release-please-action@v3 + id: tag-release + with: + path: packages/cron + token: ${{ secrets.GITHUB_TOKEN }} + release-type: node + monorepo-tags: true + package-name: cron diff --git a/packages/api/db/reset.sql b/packages/api/db/reset.sql index 2e0c665e..7dde6dd7 100644 --- a/packages/api/db/reset.sql +++ b/packages/api/db/reset.sql @@ -1,2 +1,3 @@ DROP TABLE IF EXISTS perma_cache; -DROP TABLE IF EXISTS perma_cache_event; \ No newline at end of file +DROP TABLE IF EXISTS 
perma_cache_event; +DROP TABLE IF EXISTS metric; diff --git a/packages/api/db/tables.sql b/packages/api/db/tables.sql index c2ab754f..c014ae37 100644 --- a/packages/api/db/tables.sql +++ b/packages/api/db/tables.sql @@ -43,3 +43,12 @@ CREATE TABLE IF NOT EXISTS public.perma_cache_event ); CREATE INDEX IF NOT EXISTS perma_cache_event_user_id_idx ON perma_cache_event (user_id); + +-- Metric contains the current values of collected metrics. +CREATE TABLE IF NOT EXISTS metric +( + name TEXT PRIMARY KEY, + value BIGINT NOT NULL, + inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL, + updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL +); diff --git a/packages/api/src/constants.js b/packages/api/src/constants.js index 8b7ce89c..25e43fa8 100644 --- a/packages/api/src/constants.js +++ b/packages/api/src/constants.js @@ -8,3 +8,5 @@ export const USER_TAGS = { export const MAX_ALLOWED_URL_LENGTH = 460 export const HTTP_STATUS_CONFLICT = 409 + +export const METRICS_CACHE_MAX_AGE = 10 * 60 // in seconds (10 minutes) diff --git a/packages/api/src/index.js b/packages/api/src/index.js index 08ceb816..b10928cc 100644 --- a/packages/api/src/index.js +++ b/packages/api/src/index.js @@ -7,8 +7,13 @@ import { withApiToken, withSuperHotAuthorized, } from './auth.js' -import { permaCachePost, permaCacheListGet, permaCacheStatusGet, permaCacheDelete } from './perma-cache/index.js' - +import { + permaCachePost, + permaCacheListGet, + permaCacheStatusGet, + permaCacheDelete, +} from './perma-cache/index.js' +import { metricsGet } from './metrics.js' import { addCorsHeaders, withCorsHeaders } from './cors.js' import { errorHandler } from './error-handler.js' import { envAll } from './env.js' @@ -24,6 +29,7 @@ const auth = { router .all('*', envAll) + .get('/metrics', withCorsHeaders(metricsGet)) .get('/test', async (request, env, ctx) => { const r = await env.SUPERHOT.get('0.csv') return new Response(r.body) diff --git 
a/packages/api/src/metrics.js b/packages/api/src/metrics.js new file mode 100644 index 00000000..1595ac6e --- /dev/null +++ b/packages/api/src/metrics.js @@ -0,0 +1,50 @@ +/* global Response caches */ + +import { METRICS_CACHE_MAX_AGE } from './constants.js' + +/** + * Retrieve metrics in prometheus exposition format. + * https://prometheus.io/docs/instrumenting/exposition_formats/ + * @param {Request} request + * @param {import('./env').Env} env + * @param {import('./index').Ctx} ctx + * @returns {Promise} + */ +export async function metricsGet(request, env, ctx) { + const cache = caches.default + let res = await cache.match(request) + if (res) { + return res + } + + const [usersTotal, urlsTotal, eventsTotal, sizeTotal] = await Promise.all([ + env.db.getMetricsValue('users_total'), + env.db.getMetricsValue('urls_total'), + env.db.getMetricsValue('events_total'), + env.db.getMetricsValue('size_total'), + ]) + + const metrics = [ + `# HELP nftlinkapi_permacache_urls_total Total perma cached urls.`, + `# TYPE nftlinkapi_permacache_urls_total counter`, + `nftlinkapi_permacache_urls_total ${urlsTotal}`, + `# HELP nftlinkapi_permacache_users_total Total number of users with perma cached urls.`, + `# TYPE nftlinkapi_permacache_users_total counter`, + `nftlinkapi_permacache_users_total ${usersTotal}`, + `# HELP nftlinkapi_permacache_size_total Total perma cached size.`, + `# TYPE nftlinkapi_permacache_size_total counter`, + `nftlinkapi_permacache_size_total ${sizeTotal}`, + `# HELP nftlinkapi_permacache_events_total Total perma cache events.`, + `# TYPE nftlinkapi_permacache_events_total counter`, + `nftlinkapi_permacache_events_total ${eventsTotal}`, + ].join('\n') + + res = new Response(metrics, { + headers: { + 'Cache-Control': `public, max-age=${METRICS_CACHE_MAX_AGE}`, + }, + }) + ctx.waitUntil(cache.put(request, res.clone())) + + return res +} diff --git a/packages/api/src/utils/db-client.js b/packages/api/src/utils/db-client.js index 4f7d11a1..8e1ecd97 100644 --- 
a/packages/api/src/utils/db-client.js +++ b/packages/api/src/utils/db-client.js @@ -160,6 +160,26 @@ export class DBClient { return data || 0 } + /** + * Get metrics for a given key. + * + * @param {string} key + */ + async getMetricsValue(key) { + const query = this._client.from('metric') + const { data, error } = await query.select('value').eq('name', key) + + if (error) { + throw new DBError(error) + } + + if (!data || !data.length) { + return 0 + } + + return data[0].value + } + /** * Get user by did * diff --git a/packages/api/test/metrics.spec.js b/packages/api/test/metrics.spec.js new file mode 100644 index 00000000..38efa480 --- /dev/null +++ b/packages/api/test/metrics.spec.js @@ -0,0 +1,25 @@ +import test from 'ava' + +import { getMiniflare } from './scripts/utils.js' + +test.beforeEach((t) => { + // Create a new Miniflare environment for each test + t.context = { + mf: getMiniflare(), + } +}) + +test('Gets metrics content when empty state', async (t) => { + const { mf } = t.context + + const response = await mf.dispatchFetch('https://localhost:8787/metrics') + const metricsResponse = await response.text() + + t.is(metricsResponse.includes('nftlinkapi_permacache_urls_total 0'), true) + t.is(metricsResponse.includes('nftlinkapi_permacache_users_total 0'), true) + t.is(metricsResponse.includes('nftlinkapi_permacache_size_total 0'), true) + t.is( + metricsResponse.includes('nftlinkapi_permacache_events_total 0'), + true + ) +}) diff --git a/packages/cron/README.md b/packages/cron/README.md new file mode 100644 index 00000000..6b0ca689 --- /dev/null +++ b/packages/cron/README.md @@ -0,0 +1,26 @@ +


nftstorage.link

+

The cron jobs for housekeeping ✨

+ +## Getting started + +Ensure you have all the dependencies, by running `pnpm i` in the root project. + +The following jobs are available: + +### metrics + +Verify that the following are set in the `.env` file in root of the project monorepo. + +```ini +ENV=dev + +# For local dev the read-only connection can point at the same database +DATABASE_CONNECTION=postgres://postgres:postgres@127.0.0.1:5432/postgres +RO_DATABASE_CONNECTION=postgres://postgres:postgres@127.0.0.1:5432/postgres +``` + +Run the job: + +```sh +npm run start:metrics +``` diff --git a/packages/cron/package.json b/packages/cron/package.json new file mode 100644 index 00000000..7e6648ee --- /dev/null +++ b/packages/cron/package.json @@ -0,0 +1,23 @@ +{ + "name": "cron", + "version": "0.0.0", + "description": "nftstorage.link Cron Jobs", + "private": true, + "main": "src/index.js", + "type": "module", + "scripts": { + "start": "run-s start:*", + "start:metrics": "NODE_TLS_REJECT_UNAUTHORIZED=0 node src/bin/metrics.js" + }, + "author": "Vasco Santos", + "license": "(Apache-2.0 OR MIT)", + "dependencies": { + "debug": "^4.3.1", + "dotenv": "^9.0.2", + "p-settle": "^5.0.0", + "pg": "^8.7.1" + }, + "devDependencies": { + "npm-run-all": "^4.1.5" + } +} diff --git a/packages/cron/src/bin/metrics.js b/packages/cron/src/bin/metrics.js new file mode 100644 index 00000000..e79606b6 --- /dev/null +++ b/packages/cron/src/bin/metrics.js @@ -0,0 +1,20 @@ +#!/usr/bin/env node + +import { updateMetrics } from '../jobs/metrics.js' +import { envConfig } from '../lib/env.js' +import { getPgPool } from '../lib/utils.js' + +async function main() { + const rwPg = getPgPool(process.env, 'rw') + const roPg = getPgPool(process.env, 'ro') + + try { + await updateMetrics({ rwPg, roPg }) + } finally { + await rwPg.end() + await roPg.end() + } +} + +envConfig() +main() diff --git a/packages/cron/src/jobs/metrics.js 
b/packages/cron/src/jobs/metrics.js new file mode 100644 index 00000000..9a0e787b --- /dev/null +++ b/packages/cron/src/jobs/metrics.js @@ -0,0 +1,119 @@ +import debug from 'debug' +import settle from 'p-settle' + +import { MAX_CONCURRENT_QUERIES } from '../lib/utils.js' +const log = debug('metrics:updateMetrics') + +/** + * @typedef {import('pg').Pool} Client + * @typedef {{ name: string, value: number }} Metric + */ + +const COUNT_USERS = ` +SELECT COUNT(*) AS total +FROM nftstorage.user u JOIN nftstorage.user_tag ut +ON u.id = ut.user_id +WHERE ut.tag = 'HasSuperHotAccess'::user_tag_type AND ut.value = 'true' +` + +const COUNT_URLS = ` +SELECT COUNT(*) AS total +FROM perma_cache +` + +const COUNT_EVENTS = ` +SELECT COUNT(*) AS total +FROM perma_cache_event +` + +const SUM_SIZE = ` +SELECT COALESCE(SUM(size),0) AS total +FROM perma_cache +` + +const UPDATE_METRIC = ` +INSERT INTO metric (name, value, updated_at) + VALUES ($1, $2, TIMEZONE('utc', NOW())) +ON CONFLICT (name) DO UPDATE + SET value = $2, updated_at = TIMEZONE('utc', NOW()) +` + +/** + * Calculate metrics from RO DB and update their current values in the RW DB. 
+ * + * @param {{ rwPg: Client, roPg: Client }} config + */ +export async function updateMetrics({ roPg, rwPg }) { + const results = await settle([ + withTimeLog('updateUsersCount', () => updateUsersCount(roPg, rwPg)), + withTimeLog('updateUrlsCount', () => updateUrlsCount(roPg, rwPg)), + withTimeLog('updateEventsCount', () => updateEventsCount(roPg, rwPg)), + withTimeLog('updateSizeSum', () => updateSizeSum(roPg, rwPg)), + ], + { concurrency: MAX_CONCURRENT_QUERIES }) + + let error + for (const promise of results) { + if (promise.isFulfilled) continue + error = error || promise.reason + console.error(promise.reason) + } + + if (error) throw error + log('✅ Done') +} + +/** + * @param {Client} roPg + * @param {Client} rwPg + */ +async function updateUsersCount(roPg, rwPg) { + const { rows } = await roPg.query(COUNT_USERS) + if (!rows.length) throw new Error('no rows returned counting users') + await rwPg.query(UPDATE_METRIC, ['users_total', rows[0].total]) +} + +/** + * @param {Client} roPg + * @param {Client} rwPg + */ +async function updateUrlsCount(roPg, rwPg) { + const { rows } = await roPg.query(COUNT_URLS) + if (!rows.length) throw new Error('no rows returned counting urls') + return rwPg.query(UPDATE_METRIC, ['urls_total', rows[0].total]) +} + +/** + * @param {Client} roPg + * @param {Client} rwPg + */ +async function updateEventsCount(roPg, rwPg) { + const { rows } = await roPg.query(COUNT_EVENTS) + if (!rows.length) throw new Error('no rows returned counting events') + return rwPg.query(UPDATE_METRIC, ['events_total', rows[0].total]) +} + +/** + * @param {Client} roPg + * @param {Client} rwPg + */ +async function updateSizeSum(roPg, rwPg) { + const { rows } = await roPg.query(SUM_SIZE) + if (!rows.length) throw new Error('no rows returned summing sizes') + await rwPg.query(UPDATE_METRIC, ['size_total', rows[0].total]) +} + +/** + * @template T + * @param {string} name + * @param {() => Promise} fn + * @returns {Promise} + */ +async function 
withTimeLog(name, fn) { + const start = Date.now() + try { + return await fn() + } finally { + log(`${name} took: ${Date.now() - start}ms`) + } +} diff --git a/packages/cron/src/lib/env.js b/packages/cron/src/lib/env.js new file mode 100644 index 00000000..9259d0ba --- /dev/null +++ b/packages/cron/src/lib/env.js @@ -0,0 +1,16 @@ +import path from 'path' +import { fileURLToPath } from 'url' +import dotenv from 'dotenv' + +const __dirname = path.dirname(fileURLToPath(import.meta.url)) + +export function envConfig() { + if (!process.env.ENV || process.env.ENV === 'dev') { + // use the .env from the root of the monorepo for dev. + dotenv.config({ + path: path.resolve(__dirname, '..', '..', '..', '..', '.env'), + }) + } else { + dotenv.config() + } +} diff --git a/packages/cron/src/lib/utils.js b/packages/cron/src/lib/utils.js new file mode 100644 index 00000000..d1b84c66 --- /dev/null +++ b/packages/cron/src/lib/utils.js @@ -0,0 +1,40 @@ +import pg from 'pg' + +export const MAX_CONCURRENT_QUERIES = 10 + +/** + * Create a new Postgres pool instance from the passed environment variables. + * @param {Record} env + * @param {'ro'|'rw'} [mode] + */ +export function getPgPool(env, mode = 'rw') { + return new pg.Pool({ + connectionString: getPgConnString(env, mode), + max: MAX_CONCURRENT_QUERIES, + }) +} + +/** + * Get a postgres connection string from the passed environment variables. + * @param {Record} env + * @param {'ro'|'rw'} [mode] + */ +function getPgConnString(env, mode = 'rw') { + let connectionString + if (env.ENV === 'production') { + connectionString = + mode === 'rw' + ? env.PROD_DATABASE_CONNECTION + : env.PROD_RO_DATABASE_CONNECTION + } else if (env.ENV === 'staging') { + connectionString = + mode === 'rw' + ? env.STAGING_DATABASE_CONNECTION + : env.STAGING_RO_DATABASE_CONNECTION + } else { + connectionString = + mode === 'rw' ? 
env.DATABASE_CONNECTION : env.RO_DATABASE_CONNECTION + } + if (!connectionString) throw new Error('missing Postgres connection string') + return connectionString +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 7342eff3..fdec2b41 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -107,6 +107,21 @@ importers: rollup-plugin-terser: 7.0.2_rollup@2.70.2 typescript: 4.6.4 + packages/cron: + specifiers: + debug: ^4.3.1 + dotenv: ^9.0.2 + npm-run-all: ^4.1.5 + p-settle: ^5.0.0 + pg: ^8.7.1 + dependencies: + debug: 4.3.4 + dotenv: 9.0.2 + p-settle: 5.0.0 + pg: 8.7.3 + devDependencies: + npm-run-all: 4.1.5 + packages/edge-gateway: specifiers: '@sentry/cli': ^1.71.0 @@ -5745,7 +5760,6 @@ packages: integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==, } engines: { node: '>=4' } - dev: true /buffer/5.7.1: resolution: @@ -6630,7 +6644,6 @@ packages: optional: true dependencies: ms: 2.1.2 - dev: true /debug/4.3.4_supports-color@9.2.2: resolution: @@ -6997,6 +7010,14 @@ packages: engines: { node: '>=12' } dev: true + /dotenv/9.0.2: + resolution: + { + integrity: sha512-I9OvvrHp4pIARv4+x9iuewrWycX6CcZtoAu1XrzPxc5UygMJXJZYmBsynku8IkrJwgypE5DGNjDPmPRhDCptUg==, + } + engines: { node: '>=10' } + dev: false + /dotignore/0.1.2: resolution: { @@ -11419,7 +11440,6 @@ packages: { integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==, } - dev: true /ms/2.1.3: resolution: @@ -12309,7 +12329,6 @@ packages: { integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==, } - dev: true /parent-module/1.0.1: resolution: @@ -12459,7 +12478,6 @@ packages: { integrity: sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==, } - dev: true /pg-int8/1.0.1: resolution: @@ -12467,7 +12485,6 @@ packages: integrity: 
sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==, } engines: { node: '>=4.0.0' } - dev: true /pg-pool/3.5.1_pg@8.7.3: resolution: @@ -12478,14 +12495,12 @@ packages: pg: '>=8.0' dependencies: pg: 8.7.3 - dev: true /pg-protocol/1.5.0: resolution: { integrity: sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==, } - dev: true /pg-types/2.2.0: resolution: @@ -12499,7 +12514,6 @@ packages: postgres-bytea: 1.0.0 postgres-date: 1.0.7 postgres-interval: 1.2.0 - dev: true /pg/8.7.3: resolution: @@ -12520,7 +12534,6 @@ packages: pg-protocol: 1.5.0 pg-types: 2.2.0 pgpass: 1.0.5 - dev: true /pgpass/1.0.5: resolution: @@ -12529,7 +12542,6 @@ packages: } dependencies: split2: 4.1.0 - dev: true /picocolors/1.0.0: resolution: @@ -12781,12 +12793,10 @@ packages: integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==, } engines: { node: '>=4' } - dev: true /postgres-bytea/1.0.0: resolution: { integrity: sha1-AntTPAqokOJtFy1Hz5zOzFIazTU= } engines: { node: '>=0.10.0' } - dev: true /postgres-date/1.0.7: resolution: @@ -12794,7 +12804,6 @@ packages: integrity: sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==, } engines: { node: '>=0.10.0' } - dev: true /postgres-interval/1.2.0: resolution: @@ -12804,7 +12813,6 @@ packages: engines: { node: '>=0.10.0' } dependencies: xtend: 4.0.2 - dev: true /prelude-ls/1.1.2: resolution: { integrity: sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= } @@ -14177,7 +14185,6 @@ packages: integrity: sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==, } engines: { node: '>= 10.x' } - dev: true /sprintf-js/1.0.3: resolution: { integrity: sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= } @@ -15575,7 +15582,6 @@ packages: integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==, } engines: { node: '>=0.4' } - 
dev: true /y18n/4.0.3: resolution: @@ -15680,7 +15686,7 @@ packages: require-directory: 2.1.1 string-width: 4.2.3 y18n: 5.0.8 - yargs-parser: 20.2.4 + yargs-parser: 20.2.9 dev: true /yauzl/2.10.0: