Merge branch main of github.com:nftstorage/nft.storage into feat/issue-1827-storybook-init
cmunns committed Apr 27, 2022
2 parents d2b236c + 72480d6 commit 93f5e0a
Showing 26 changed files with 400 additions and 117 deletions.
14 changes: 14 additions & 0 deletions packages/api/CHANGELOG.md
@@ -1,5 +1,19 @@
# Changelog

## [2.21.0](https://github.com/nftstorage/nft.storage/compare/api-v2.20.0...api-v2.21.0) (2022-04-21)


### Features

* add HasSuperHotAccess user tag ([#1838](https://github.com/nftstorage/nft.storage/issues/1838)) ([019a505](https://github.com/nftstorage/nft.storage/commit/019a505e8f4bb93a24b8c480646779f5e4b66326))
* add permissions endpoint ([#1753](https://github.com/nftstorage/nft.storage/issues/1753)) ([2f5b6fb](https://github.com/nftstorage/nft.storage/commit/2f5b6fb2660231e4591f37e29389c41ac7045605))


### Bug Fixes

* pin magic admin ([#1841](https://github.com/nftstorage/nft.storage/issues/1841)) ([1bec0b0](https://github.com/nftstorage/nft.storage/commit/1bec0b03dcf13f700ceeaf2a0e3cce8f291f0072))
* remove cluster API URL from /version endpoint ([#1843](https://github.com/nftstorage/nft.storage/issues/1843)) ([cece7ce](https://github.com/nftstorage/nft.storage/commit/cece7ce7773b9c37a18d8c0f33b0ec307474e294))

## [2.20.0](https://github.com/nftstorage/nft.storage/compare/api-v2.19.0...api-v2.20.0) (2022-04-12)


9 changes: 9 additions & 0 deletions packages/api/README.md
@@ -97,6 +97,15 @@ When prompted for a value enter one of the following permission combinations:
- `r-` = read only mode
- `rw` = read and write (normal operation)

## DB Types

The PostgREST API can generate type definitions automatically from the table and column
names. To use this, make sure you've spun down your nft.storage dev environment, then run
`yarn db-types` from the `packages/api` root directory. This will auto-generate the
`packages/api/utils/db-types.d.ts` file.

A common error is "cannot read version of schema"; this typically indicates that another service is running on localhost:3000, which is the default port and URL for the PostgREST API.
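
A minimal sketch of that workflow (assuming the dev environment is already stopped so port 3000 is free):

```sh
cd packages/api
yarn db-types   # writes packages/api/utils/db-types.d.ts
```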

## S3 Setup

We use [S3](https://aws.amazon.com/s3/) for backup and disaster recovery. For production deployment an account on AWS is required.
2 changes: 1 addition & 1 deletion packages/api/db/cargo.sql
@@ -2,6 +2,6 @@ CREATE SCHEMA IF NOT EXISTS cargo;

-- Import dag cargo schema
IMPORT FOREIGN SCHEMA cargo
LIMIT TO (aggregate_entries, aggregates, deals, dags)
LIMIT TO (aggregate_entries, aggregates, deals, dags, metrics, metrics_log)
FROM SERVER dag_cargo_server
INTO cargo;
58 changes: 56 additions & 2 deletions packages/api/db/cargo.testing.sql
@@ -35,7 +35,29 @@ CREATE TABLE IF NOT EXISTS cargo.deals (
entry_last_updated TIMESTAMP WITH TIME ZONE NOT NULL
);

-- Test data
CREATE TABLE IF NOT EXISTS cargo.metrics (
name TEXT NOT NULL,
dimensions TEXT[],
description TEXT NOT NULL,
value BIGINT,
collected_at TIMESTAMP WITH TIME ZONE,
collection_took_seconds NUMERIC NOT NULL
);

CREATE TABLE IF NOT EXISTS cargo.metrics_log (
name TEXT NOT NULL,
dimensions TEXT[],
value BIGINT,
collected_at TIMESTAMP WITH TIME ZONE
);

-- Test data for cargo tables

INSERT INTO cargo.metrics_log (name, dimensions, value, collected_at) VALUES
('dagcargo_project_bytes_in_active_deals', '{{project,staging.nft.storage}}', 167859554927623, '2022-04-01 13:41:08.479404+00');

INSERT INTO cargo.metrics_log (name, dimensions, value, collected_at) VALUES
('dagcargo_project_bytes_in_active_deals', '{{project,nft.storage}}', 169334115720738, '2022-03-01 16:33:28.505513+00');

INSERT INTO cargo.aggregate_entries ("aggregate_cid", "cid_v1", "datamodel_selector") VALUES
('bafybeiek5gau46j4dxoyty27qtirb3iuoq7aax4l3xt25mfk2igyt35bme', 'bafybeiaj5yqocsg5cxsuhtvclnh4ulmrgsmnfbhbrfxrc3u2kkh35mts4e', 'Links/19/Hash/Links/46/Hash/Links/0/Hash');
@@ -46,6 +68,38 @@ INSERT INTO cargo.aggregates ("aggregate_cid", "piece_cid", "sha256hex", "export
INSERT INTO cargo.deals ("deal_id", "aggregate_cid", "client", "provider", "status", "start_epoch", "end_epoch", "entry_created", "entry_last_updated", "status_meta", "start_time", "sector_start_epoch", "sector_start_time", "end_time") VALUES
(2424132, 'bafybeiek5gau46j4dxoyty27qtirb3iuoq7aax4l3xt25mfk2igyt35bme', 'f144zep4gitj73rrujd3jw6iprljicx6vl4wbeavi', 'f0678914', 'active', 1102102, 2570902, '2021-09-09 16:30:52.252233+00', '2021-09-10 00:45:50.408956+00', 'containing sector active as of 2021-09-10 00:36:30 at epoch 1097593', '2021-09-11 14:11:00+00', 1097593, '2021-09-10 00:36:30+00', '2023-02-03 14:11:00+00');

INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
('dagcargo_project_items_in_active_deals', '{{project,staging.web3.storage}}', 'Count of aggregated items with at least one active deal per project', 1438, '2022-04-14 23:56:46.803497+00', 405.292);

INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
('dagcargo_project_items_in_active_deals', '{{project,nft.storage}}', 'Count of aggregated items with at least one active deal per project', 56426047, '2022-04-14 23:56:46.806892+00', 405.292);

INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
('dagcargo_project_bytes_in_active_deals', '{{project,nft.storage}}', 'Amount of per-DAG-deduplicated bytes with at least one active deal per project', 169389985753391, '2022-04-14 23:51:45.76915+00', 104.256);

INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
('dagcargo_project_bytes_in_active_deals', '{{project,staging.web3.storage}}', 'Amount of per-DAG-deduplicated bytes with at least one active deal per project', 133753809372, '2022-04-14 23:51:45.76712+00', 104.256);

INSERT INTO cargo.metrics (name, dimensions, description, value, collected_at, collection_took_seconds) VALUES
('dagcargo_project_bytes_in_active_deals', '{{project,web3.storage}}', 'Amount of per-DAG-deduplicated bytes with at least one active deal per project', 181663391277785, '2022-04-14 23:51:45.768323+00', 104.256);

INSERT INTO public.metric (name, value, updated_at)
VALUES ('uploads_past_7_total', 2011366, TIMEZONE('utc', NOW()));

INSERT INTO public.metric (name, value, updated_at)
VALUES ('uploads_nft_total', 685866, TIMEZONE('utc', NOW()));

INSERT INTO public.metric (name, value, updated_at)
VALUES ('uploads_remote_total', 11077834, TIMEZONE('utc', NOW()));

INSERT INTO public.metric (name, value, updated_at)
VALUES ('uploads_car_total', 17711308, TIMEZONE('utc', NOW()));

INSERT INTO public.metric (name, value, updated_at)
VALUES ('uploads_multipart_total', 1456388, TIMEZONE('utc', NOW()));

INSERT INTO public.metric (name, value, updated_at)
VALUES ('uploads_blob_total', 12420729, TIMEZONE('utc', NOW()));

INSERT INTO public."user" (magic_link_id, github_id, name, email, public_address) VALUES ('did:ethr:0x65007A739ab7AC5c537161249b81250E49e2853C', 'github|000000', 'mock user', 'test@gmail.com', '0x65007A739ab7AC5c537161249b81250E49e2853C');
INSERT INTO public.auth_key (name, secret, user_id) VALUES ('main', 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweDY1MDA3QTczOWFiN0FDNWM1MzcxNjEyNDliODEyNTBFNDllMjg1M0MiLCJpc3MiOiJuZnQtc3RvcmFnZSIsImlhdCI6MTYzOTc1NDczNjYzOCwibmFtZSI6Im1haW4ifQ.wKwJIRXXHsgwVp8mOQp6r3_F4Lz5lnoAkgVP8wqwA_Y', 1);
INSERT INTO public.auth_key (name, secret, user_id) VALUES ('main', 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkaWQ6ZXRocjoweDY1MDA3QTczOWFiN0FDNWM1MzcxNjEyNDliODEyNTBFNDllMjg1M0MiLCJpc3MiOiJuZnQtc3RvcmFnZSIsImlhdCI6MTYzOTc1NDczNjYzOCwibmFtZSI6Im1haW4ifQ.wKwJIRXXHsgwVp8mOQp6r3_F4Lz5lnoAkgVP8wqwA_Y', 1);
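
These fixtures let the new metrics lookups in `getStats()` (see `db-client.js` below) be exercised locally. A rough SQL equivalent of what that code asks PostgREST for:

```sql
-- current deduplicated bytes in active deals for the nft.storage project
SELECT value
FROM cargo.metrics
WHERE name = 'dagcargo_project_bytes_in_active_deals'
  AND dimensions = '{{project,nft.storage}}';

-- the most recent reading that is at least 7 days old (the "prev" figure)
SELECT value
FROM cargo.metrics_log
WHERE name = 'dagcargo_project_bytes_in_active_deals'
  AND dimensions = '{{project,nft.storage}}'
  AND collected_at <= NOW() - INTERVAL '7 days'
ORDER BY collected_at DESC
LIMIT 1;
```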
@@ -0,0 +1,5 @@
-- Import dag cargo schema
IMPORT FOREIGN SCHEMA cargo
LIMIT TO (metrics, metrics_log)
FROM SERVER dag_cargo_server
INTO cargo;
2 changes: 1 addition & 1 deletion packages/api/docker/docker-compose.yml
@@ -9,7 +9,7 @@ services:
- 3000:3000/tcp
environment:
PGRST_DB_URI: postgres://postgres:postgres@db:5432/postgres
PGRST_DB_SCHEMA: public
PGRST_DB_SCHEMAS: public,cargo
PGRST_DB_ANON_ROLE: postgres
PGRST_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long
db:
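
With `PGRST_DB_SCHEMAS` listing both schemas, PostgREST serves them side by side and a client selects one per request. A minimal sketch with postgrest-js, mirroring the `cargoClient` added in `db-client.js` below (the URL assumes the local docker-compose setup):

```js
import { PostgrestClient } from '@supabase/postgrest-js'

// passing `schema` makes the client target the cargo schema
// (sent as a profile header) instead of the default `public`
const cargo = new PostgrestClient('http://localhost:3000', { schema: 'cargo' })

const { data, error } = await cargo
  .from('metrics')
  .select('name, value')
  .match({ name: 'dagcargo_project_bytes_in_active_deals' })
```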
2 changes: 1 addition & 1 deletion packages/api/package.json
@@ -1,6 +1,6 @@
{
"name": "api",
"version": "2.20.0",
"version": "2.21.0",
"description": "NFT Storage API",
"private": true,
"type": "module",
3 changes: 1 addition & 2 deletions packages/api/src/routes/metrics.js
@@ -11,8 +11,7 @@ export async function metrics(_, { db }) {

/** @type {import('../bindings').Handler} */
export async function getStats(_, { db }) {
let stats = await db.getStats()

const stats = await db.getStats()
return new JSONResponse(
{
ok: true,
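
For reference, a hypothetical call to the stats route (the exact path and host are not shown in this diff; `/stats` is an assumption):

```sh
# expected to return { "ok": true, ... } with the fields from StatsPayload
curl http://localhost:8787/stats
```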
12 changes: 12 additions & 0 deletions packages/api/src/utils/db-client-types.ts
@@ -89,3 +89,15 @@ export interface ListUploadsOptions {
limit?: number
meta?: unknown
}

export type StatsPayload = {
[key: string]: number
deals_size_total: number
deals_size_total_prev: number
uploads_blob_total: number
uploads_car_total: number
uploads_multipart_total: number
uploads_nft_total: number
uploads_past_7_total: number
uploads_remote_total: number
}
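
For illustration, here is the `StatsPayload` that `getStats()` would assemble from the SQL fixtures earlier in this commit (values are the seeded test data, not production numbers):

```ts
const stats: StatsPayload = {
  deals_size_total: 169389985753391, // cargo.metrics, {{project,nft.storage}}
  deals_size_total_prev: 169334115720738, // cargo.metrics_log, >= 7 days old
  uploads_past_7_total: 2011366,
  uploads_nft_total: 685866,
  uploads_remote_total: 11077834,
  uploads_car_total: 17711308,
  uploads_multipart_total: 1456388,
  uploads_blob_total: 12420729,
}
```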
78 changes: 65 additions & 13 deletions packages/api/src/utils/db-client.js
@@ -30,6 +30,14 @@ export class DBClient {
apikey: `${token}`,
},
})

this.cargoClient = new PostgrestClient(url, {
headers: {
Authorization: `Bearer ${token}`,
apikey: `${token}`,
},
schema: 'cargo',
})
}

/**
@@ -538,14 +546,20 @@
return data[0].value
}

/**
* Get stats for uploads and cargo deals
*
* @returns {Promise<import('./db-client-types').StatsPayload>}
*/
async getStats() {
/** @type {PostgrestQueryBuilder<definitions['metric']>} */
const query = this.client.from('metric')
const { data, error } = await query
const nonCargoMetricQuery = this.client.from('metric')
const metricsQuery = this.cargoClient.from('metrics')
const metricsLogQuery = this.cargoClient.from('metrics_log')

const primaryMetricQuery = nonCargoMetricQuery
.select('name, value')
.in('name', [
'deals_total',
'deals_size_total',
'uploads_past_7_total',
'uploads_blob_total',
'uploads_car_total',
@@ -554,19 +568,57 @@
'uploads_multipart_total',
])

if (error) {
throw new DBError(error)
const dagByteSizeQuery = metricsQuery
.select('name, dimensions, value')
.match({
name: 'dagcargo_project_bytes_in_active_deals',
dimensions: '{{project,nft.storage}}',
})
.single()

const weekAgo = new Date()
weekAgo.setDate(weekAgo.getDate() - 7)

const dagByteSizeHistory = metricsLogQuery
.select('name, dimensions, value')
.match({
name: 'dagcargo_project_bytes_in_active_deals',
dimensions: '{{project,nft.storage}}',
})
.lte('collected_at', weekAgo.toISOString())
.order('collected_at', { ascending: false })
.range(0, 1)
.single()

const [primaryRes, dagSizeRes, dagSizeHistRes] = await Promise.all([
primaryMetricQuery,
dagByteSizeQuery,
dagByteSizeHistory,
])

if (primaryRes.error || dagSizeHistRes.error || dagSizeRes.error) {
// this allows us to avoid changing the constructor of DBError to allow null
const err = Object.assign(
{},
primaryRes.error || dagSizeHistRes.error || dagSizeRes.error
)
throw new DBError(err)
}

if (!data || !data.length) {
return undefined
/** @type {import('./db-client-types').StatsPayload} */
const stats = {}

// Simple splatting of the metrics from the first query
if (primaryRes.data && primaryRes.data.length) {
for (const metric of primaryRes.data) {
stats[metric.name] = metric.value
}
}

return data.reduce((obj, curr) => {
// @ts-ignore
obj[curr.name] = curr.value
return obj
}, {})
stats.deals_size_total = dagSizeRes.data.value
stats.deals_size_total_prev = dagSizeHistRes.data.value

return stats
}
}
