feat: db optimizations, backups optimizations (#1664)
yocontra authored Apr 1, 2022
1 parent 13d7875 commit ef0cf02
Showing 8 changed files with 49 additions and 170 deletions.
17 changes: 16 additions & 1 deletion packages/api/db/config.sql
@@ -1 +1,16 @@
SET max_parallel_workers_per_gather TO 4;
-- PG doesn't support ALTER DATABASE CURRENT, and the db name is different between local/staging/production
-- So we have to execute using variable substitution
DO $$
BEGIN
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET default_statistics_target = 1000';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET enable_partitionwise_aggregate = on';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET enable_partitionwise_join = on';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_workers_per_gather = 8';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_workers = 16';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET max_parallel_maintenance_workers = 8';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET jit = on';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET idle_in_transaction_session_timeout = ''1min''';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET lock_timeout = ''1min''';
EXECUTE 'ALTER DATABASE ' || current_database() || ' SET statement_timeout = ''30s''';
END
$$;
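
These ALTER DATABASE ... SET values are stored per database and only take effect for new sessions. A quick way to confirm they were recorded, assuming psql access to the same database, is to read them back from pg_db_role_setting:

-- List the per-database settings written by the DO block above.
SELECT unnest(setconfig) AS setting
FROM pg_db_role_setting
WHERE setdatabase = (SELECT oid FROM pg_database WHERE datname = current_database())
  AND setrole = 0;  -- 0 = settings that apply to all roles, matching ALTER DATABASE ... SET

Individual values can also be checked from a fresh session with SHOW, e.g. SHOW statement_timeout;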
16 changes: 5 additions & 11 deletions packages/api/db/functions.sql
@@ -23,10 +23,7 @@ AS
$$
DECLARE
inserted_upload_id BIGINT;
backup_url TEXT;
BEGIN
SET LOCAL statement_timeout = '30s';

insert into content (cid, dag_size, updated_at, inserted_at)
values (data ->> 'content_cid',
(data ->> 'dag_size')::BIGINT,
@@ -52,7 +49,10 @@ BEGIN
name,
files,
origins,
meta, updated_at, inserted_at)
meta,
backup_urls,
updated_at,
inserted_at)
values ((data ->> 'user_id')::BIGINT,
(data ->> 'key_id')::BIGINT,
data ->> 'content_cid',
@@ -63,6 +63,7 @@ BEGIN
(data ->> 'files')::jsonb,
(data ->> 'origins')::jsonb,
(data ->> 'meta')::jsonb,
json_arr_to_text_arr(data -> 'backup_urls'),
(data ->> 'updated_at')::timestamptz,
(data ->> 'inserted_at')::timestamptz)
ON CONFLICT ( user_id, source_cid )
@@ -74,13 +75,6 @@ BEGIN
mime_type = data ->> 'mime_type',
type = (data ->> 'type')::upload_type
RETURNING id INTO inserted_upload_id;

FOREACH backup_url IN ARRAY json_arr_to_text_arr(data -> 'backup_urls')
LOOP
INSERT INTO backup (upload_id, url, inserted_at)
VALUES (inserted_upload_id, backup_url, (data ->> 'inserted_at')::TIMESTAMPTZ)
ON CONFLICT (upload_id, url) DO NOTHING;
END LOOP;
END
$$;
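
The rewritten upsert now stores backup URLs directly on the upload row via json_arr_to_text_arr(data -> 'backup_urls'), replacing the per-URL inserts into the separate backup table. The helper itself is defined outside this diff; a minimal sketch of what such a JSON-array-to-text-array function typically looks like (an assumption, not the committed definition):

-- Hypothetical sketch: unpack a JSON array of strings into text[].
-- The real parameter type and body may differ from this assumption.
CREATE OR REPLACE FUNCTION json_arr_to_text_arr(_json jsonb)
  RETURNS text[]
  LANGUAGE sql
  IMMUTABLE
AS $$
  SELECT ARRAY(SELECT jsonb_array_elements_text(_json));
$$;

Folding the URLs into one column lets the function drop the FOREACH loop and the extra writes to the backup table.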

3 changes: 3 additions & 0 deletions packages/api/db/reset.sql
@@ -17,3 +17,6 @@ DROP TABLE IF EXISTS cargo.aggregate_entries;
DROP TABLE IF EXISTS cargo.aggregates;
DROP TABLE IF EXISTS cargo.deals;
DROP SERVER IF EXISTS dag_cargo_server CASCADE;

-- Reset settings from config.sql
ALTER DATABASE postgres RESET ALL;
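
ALTER DATABASE ... RESET ALL clears every per-database setting written by config.sql. The effect can be confirmed the same way the settings are inspected above, assuming the local database is named postgres as in this reset script:

-- After RESET ALL this should return zero rows for the postgres database.
SELECT setconfig
FROM pg_db_role_setting
WHERE setdatabase = (SELECT oid FROM pg_database WHERE datname = 'postgres');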
24 changes: 6 additions & 18 deletions packages/api/db/tables.sql
@@ -174,13 +174,18 @@ CREATE TABLE IF NOT EXISTS upload
-- 1. Pinning Service API user provided `Record<string, string>`.
-- 2. Metaplex endpoint `/metaplex/upload` to store details of the Metaplex user.
meta jsonb,
backup_urls text[],
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
deleted_at TIMESTAMP WITH TIME ZONE,
UNIQUE (user_id, source_cid)
);

CREATE INDEX IF NOT EXISTS upload_inserted_at_idx ON upload (inserted_at);
CREATE INDEX IF NOT EXISTS upload_content_cid_idx ON upload (content_cid);
CREATE INDEX IF NOT EXISTS upload_source_cid_idx ON upload (source_cid);
CREATE INDEX IF NOT EXISTS upload_updated_at_idx ON upload (updated_at);
CREATE INDEX IF NOT EXISTS upload_type_idx ON upload (type);

CREATE VIEW admin_search as
select
@@ -196,28 +201,11 @@ from public.user u
full outer join auth_key ak on ak.user_id = u.id
full outer join (select * from auth_key_history where deleted_at is null) as akh on akh.auth_key_id = ak.id;

CREATE INDEX IF NOT EXISTS upload_content_cid_idx ON upload (content_cid);
CREATE INDEX IF NOT EXISTS upload_source_cid_idx ON upload (source_cid);
CREATE INDEX IF NOT EXISTS upload_updated_at_idx ON upload (updated_at);
CREATE INDEX IF NOT EXISTS upload_type_idx ON upload (type);

-- Metric contains the current values of collected metrics.
CREATE TABLE IF NOT EXISTS metric
(
name TEXT PRIMARY KEY,
value BIGINT NOT NULL,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
updated_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL
);

-- URLs of backups of user uploads
CREATE TABLE IF NOT EXISTS backup
(
id BIGSERIAL PRIMARY KEY,
upload_id BIGINT NOT NULL REFERENCES public.upload (id),
url TEXT NOT NULL,
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT timezone('utc'::text, now()) NOT NULL,
UNIQUE (upload_id, url)
);

CREATE INDEX IF NOT EXISTS backup_upload_id_idx ON backup (upload_id);
);
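
With backup URLs denormalized onto upload.backup_urls and the backup table dropped from the schema, any existing rows would need their URLs folded into the new column. That migration is not part of this diff; a hypothetical one-off backfill, assuming the old backup table still exists when it runs, could look like:

-- Hypothetical backfill: collect each upload's backup URLs into the new array column.
UPDATE upload u
SET backup_urls = sub.urls
FROM (
  SELECT upload_id, array_agg(url ORDER BY inserted_at) AS urls
  FROM backup
  GROUP BY upload_id
) AS sub
WHERE sub.upload_id = u.id;

The old table and its index would only be dropped once a backfill like this has been verified.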
12 changes: 8 additions & 4 deletions packages/api/docker/docker-compose.yml
@@ -15,12 +15,16 @@ services:
db:
build:
context: ./postgres
deploy:
resources:
limits:
cpus: '2'
memory: 2G
reservations:
cpus: '1'
memory: 1G
ports:
- 5432:5432
command:
- postgres
- -c
- wal_level=logical
environment:
POSTGRES_DB: postgres
POSTGRES_USER: postgres
5 changes: 4 additions & 1 deletion packages/api/docker/postgres/Dockerfile
@@ -1,4 +1,4 @@
FROM supabase/postgres:0.13.0
FROM supabase/postgres:13.3.0

COPY 00-initial-schema.sql /docker-entrypoint-initdb.d/00-initial-schema.sql

@@ -9,3 +9,6 @@ ENV POSTGRES_PASSWORD=postgres
ENV POSTGRES_PORT=5432

EXPOSE 5432

# Enables cat /var/lib/postgresql/data/pg_log/postgresql.log within the container to debug queries
CMD ["postgres", "-c", "wal_level=logical", "-c", "log_statement=all", "-c", "pg_stat_statements.track=all"]
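
Besides logging every statement, the container flags turn on pg_stat_statements.track=all, which makes slow queries easy to spot during local testing. A sketch, assuming the base image preloads the pg_stat_statements library (the track=all flag suggests it does):

-- Make the extension's view available, then list the slowest statements.
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;

SELECT query, calls, mean_exec_time  -- column name as of PostgreSQL 13
FROM pg_stat_statements
ORDER BY mean_exec_time DESC
LIMIT 10;

SHOW wal_level;  -- expected: logical, matching the -c wal_level=logical flag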
132 changes: 5 additions & 127 deletions packages/api/src/utils/db-types.d.ts
@@ -259,102 +259,6 @@ export interface paths {
}
}
}
'/backup': {
get: {
parameters: {
query: {
id?: parameters['rowFilter.backup.id']
upload_id?: parameters['rowFilter.backup.upload_id']
url?: parameters['rowFilter.backup.url']
inserted_at?: parameters['rowFilter.backup.inserted_at']
/** Filtering Columns */
select?: parameters['select']
/** Ordering */
order?: parameters['order']
/** Limiting and Pagination */
offset?: parameters['offset']
/** Limiting and Pagination */
limit?: parameters['limit']
}
header: {
/** Limiting and Pagination */
Range?: parameters['range']
/** Limiting and Pagination */
'Range-Unit'?: parameters['rangeUnit']
/** Preference */
Prefer?: parameters['preferCount']
}
}
responses: {
/** OK */
200: {
schema: definitions['backup'][]
}
/** Partial Content */
206: unknown
}
}
post: {
parameters: {
body: {
/** backup */
backup?: definitions['backup']
}
query: {
/** Filtering Columns */
select?: parameters['select']
}
header: {
/** Preference */
Prefer?: parameters['preferReturn']
}
}
responses: {
/** Created */
201: unknown
}
}
delete: {
parameters: {
query: {
id?: parameters['rowFilter.backup.id']
upload_id?: parameters['rowFilter.backup.upload_id']
url?: parameters['rowFilter.backup.url']
inserted_at?: parameters['rowFilter.backup.inserted_at']
}
header: {
/** Preference */
Prefer?: parameters['preferReturn']
}
}
responses: {
/** No Content */
204: never
}
}
patch: {
parameters: {
query: {
id?: parameters['rowFilter.backup.id']
upload_id?: parameters['rowFilter.backup.upload_id']
url?: parameters['rowFilter.backup.url']
inserted_at?: parameters['rowFilter.backup.inserted_at']
}
body: {
/** backup */
backup?: definitions['backup']
}
header: {
/** Preference */
Prefer?: parameters['preferReturn']
}
}
responses: {
/** No Content */
204: never
}
}
}
'/content': {
get: {
parameters: {
Expand Down Expand Up @@ -1154,27 +1058,6 @@ export interface definitions {
/** Format: timestamp with time zone */
deleted_at?: string
}
backup: {
/**
* Format: bigint
* @description Note:
* This is a Primary Key.<pk/>
*/
id: number
/**
* Format: bigint
* @description Note:
* This is a Foreign Key to `upload.id`.<fk table='upload' column='id'/>
*/
upload_id: number
/** Format: text */
url: string
/**
* Format: timestamp with time zone
* @default timezone('utc'::text, now())
*/
inserted_at: string
}
content: {
/**
* Format: text
Expand Down Expand Up @@ -1282,6 +1165,11 @@ export interface definitions {
origins?: string
/** Format: jsonb */
meta?: string
/**
* Format: text[]
* @description Note:
*/
backup_urls: string[]
/**
* Format: timestamp with time zone
* @default timezone('utc'::text, now())
@@ -1432,16 +1320,6 @@ export interface parameters {
'rowFilter.auth_key_history.inserted_at': string
/** Format: timestamp with time zone */
'rowFilter.auth_key_history.deleted_at': string
/** @description backup */
'body.backup': definitions['backup']
/** Format: bigint */
'rowFilter.backup.id': string
/** Format: bigint */
'rowFilter.backup.upload_id': string
/** Format: text */
'rowFilter.backup.url': string
/** Format: timestamp with time zone */
'rowFilter.backup.inserted_at': string
/** @description content */
'body.content': definitions['content']
/** Format: text */
10 changes: 2 additions & 8 deletions packages/api/test/nfts-upload.spec.js
@@ -281,13 +281,7 @@ describe('NFT Upload ', () => {

const upload = await client.client.getUpload(value.cid, client.userId)
assert(upload)

const { data: backup } = await rawClient
.from('backup')
.select('*')
.match({ upload_id: upload.id })
.single()
assert(backup) // should have a backup for this upload
assert(upload.backup_urls)

/**
* @param {Uint8Array} data
@@ -302,7 +296,7 @@ describe('NFT Upload ', () => {
const carHash = await getHash(new Uint8Array(carBuf))
const backupUrl = `${S3_ENDPOINT}/${S3_BUCKET_NAME}/raw/${root}/nft-${client.userId}/${carHash}.car`

assert.equal(backup.url, backupUrl)
assert.equal(upload.backup_urls[0], backupUrl)
})

it('should upload a single file using ucan', async () => {
Expand Down
