Skip to content

Commit

Permalink
fix: storage used should not count failed uploads (#1430)
Browse files Browse the repository at this point in the history
Co-authored-by: Adam Alton <adamalton@gmail.com>
  • Loading branch information
flea89 and adamalton authored Jun 15, 2022
1 parent ac10cb0 commit a86d7e2
Show file tree
Hide file tree
Showing 5 changed files with 178 additions and 59 deletions.
19 changes: 14 additions & 5 deletions packages/db/postgres/functions.sql
Original file line number Diff line number Diff line change
Expand Up @@ -256,11 +256,20 @@ DECLARE
BEGIN
uploaded :=
(
SELECT COALESCE(SUM(c.dag_size), 0)
FROM upload u
JOIN content c ON c.cid = u.content_cid
WHERE u.user_id = query_user_id::BIGINT
AND u.deleted_at is null
SELECT COALESCE((
SELECT SUM(dag_size)
FROM (
SELECT c.cid,
c.dag_size
FROM upload u
JOIN content c ON c.cid = u.content_cid
JOIN pin p ON p.content_cid = u.content_cid
WHERE u.user_id = query_user_id::BIGINT
AND u.deleted_at is null
AND p.status = 'Pinned'
GROUP BY c.cid,
c.dag_size
) AS uploaded_content), 0)
);

psa_pinned :=
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
-- Due to the parameters differing from the old function definition, if we don't drop the old one then it's not replaced.
DROP FUNCTION IF EXISTS user_used_storage;

-- Returns the storage used by a user as a stored_bytes composite of
-- (uploaded, psa_pinned, total).
--
-- Only content with at least one pin in 'Pinned' status is counted, so
-- failed ('PinError') and still-in-progress ('Pinning') uploads and pin
-- requests do not inflate the figure. Each content CID is counted at most
-- once per category, even when it appears in several uploads or requests.
CREATE OR REPLACE FUNCTION user_used_storage(query_user_id BIGINT)
RETURNS stored_bytes
LANGUAGE plpgsql
AS
$$
DECLARE
  used_storage stored_bytes;
  uploaded     BIGINT;
  psa_pinned   BIGINT;
  total        BIGINT;
BEGIN
  -- Bytes from regular uploads: sum dag_size over the DISTINCT set of
  -- content CIDs the user has live (non-deleted) uploads for, restricted to
  -- content that is actually pinned somewhere. EXISTS replaces the previous
  -- JOIN-to-pin + GROUP BY, which joined only to test existence and then
  -- grouped to undo the row multiplication; the "count each CID once"
  -- semantics are kept by the DISTINCT.
  uploaded := (
    SELECT COALESCE(SUM(c.dag_size), 0)
    FROM (
      SELECT DISTINCT u.content_cid
      FROM upload u
      WHERE u.user_id = query_user_id
        AND u.deleted_at IS NULL
        AND EXISTS (
          SELECT 1
          FROM pin p
          WHERE p.content_cid = u.content_cid
            AND p.status = 'Pinned'
        )
    ) AS uploaded_content
    JOIN content c ON c.cid = uploaded_content.content_cid
  );

  -- Bytes from Pinning Service API requests: same rules as above, but the
  -- requests are scoped to the user through their auth keys.
  psa_pinned := (
    SELECT COALESCE(SUM(c.dag_size), 0)
    FROM (
      SELECT DISTINCT psa_pr.content_cid
      FROM psa_pin_request psa_pr
      JOIN auth_key a ON a.id = psa_pr.auth_key_id
      WHERE a.user_id = query_user_id
        AND psa_pr.deleted_at IS NULL
        AND EXISTS (
          SELECT 1
          FROM pin p
          WHERE p.content_cid = psa_pr.content_cid
            AND p.status = 'Pinned'
        )
    ) AS pinned_content
    JOIN content c ON c.cid = pinned_content.content_cid
  );

  total := uploaded + psa_pinned;

  -- Fields are cast to TEXT to populate the stored_bytes composite, as the
  -- original did — presumably so large BIGINTs survive serialization to
  -- clients; confirm against the stored_bytes type definition.
  SELECT uploaded::TEXT,
         psa_pinned::TEXT,
         total::TEXT
  INTO used_storage;

  RETURN used_storage;
END
$$;
6 changes: 3 additions & 3 deletions packages/db/test/pin-sync-request.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import assert from 'assert'
import { DBClient } from '../index.js'

import { createUser, createUserAuthKey, createUpload, defaultPinData, token } from './utils.js'
import { createUser, createUserAuthKey, createUpload, initialPinsNotPinned, token } from './utils.js'

describe('pin-sync-request', () => {
/** @type {DBClient} */
Expand Down Expand Up @@ -51,7 +51,7 @@ describe('pin-sync-request', () => {
assert(pinSyncReqs, 'pin sync requests exist')

// expect pin sync requests = added pins for each upload where status is not pinned
const expectedPinSyncReqs = defaultPinData.filter(pd => pd.status !== 'Pinned').length * 2
const expectedPinSyncReqs = initialPinsNotPinned.filter(pd => pd.status !== 'Pinned').length * 2
assert.strictEqual(pinSyncReqs.length, expectedPinSyncReqs, 'created pin sync requests for non pinned entries')
})

Expand Down Expand Up @@ -96,7 +96,7 @@ describe('pin-sync-request', () => {
assert(pinSyncReqs, 'pin sync requests exist')

// From the 2 setup uploads expected pins
const previousExpectedPinSyncReqs = defaultPinData.filter(pd => pd.status !== 'Pinned').length * 2
const previousExpectedPinSyncReqs = initialPinsNotPinned.filter(pd => pd.status !== 'Pinned').length * 2
// Pins for the new upload
const newPinSyncReqs = pins.filter(pd => pd.status !== 'Pinned').length
assert.strictEqual(pinSyncReqs.length, newPinSyncReqs + previousExpectedPinSyncReqs, 'created pin sync requests for non pinned entries')
Expand Down
42 changes: 23 additions & 19 deletions packages/db/test/user.spec.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
/* eslint-env mocha, browser */
import assert from 'assert'
import { DBClient } from '../index.js'
import { token } from './utils.js'
import { createUpload, initialPinsNotPinned, pinsError, randomCid, token } from './utils.js'

describe('user operations', () => {
const name = 'test-name'
Expand Down Expand Up @@ -166,15 +166,8 @@ describe('user operations', () => {
// Create Upload 1
const cid1 = 'bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47fgf111'
const dagSize1 = 10000
await client.createUpload({
user: user._id,
contentCid: cid1,
sourceCid: cid1,
authKey: authKey._id,
type: 'Upload',
dagSize: dagSize1,
pins: [],
backupUrls: []
await createUpload(client, user._id, authKey._id, cid1, {
dagSize: dagSize1
})

const firstUsedStorage = await client.getStorageUsed(user._id)
Expand All @@ -183,17 +176,28 @@ describe('user operations', () => {
// Create Upload 2
const cid2 = 'bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47fgf112'
const dagSize2 = 30000
await client.createUpload({
user: user._id,
contentCid: cid2,
sourceCid: cid2,
authKey: authKey._id,
type: 'Upload',
dagSize: dagSize2,
pins: [],
backupUrls: []

await createUpload(client, user._id, authKey._id, cid2, {
dagSize: dagSize2
})

// Create "Failed" Upload. It should not be counted.
const cid3 = await randomCid()
const dagSize3 = 100000
await createUpload(client, user._id, authKey._id, cid3, {
dagSize: dagSize3,
pins: pinsError
})

// Create Upload not pinned yet. It should not be counted yet.
await createUpload(client, user._id, authKey._id, await randomCid(), {
dagSize: 1000000,
pins: initialPinsNotPinned
})

const usedStorageWithFailed = await client.getStorageUsed(user._id)
assert.strictEqual(usedStorageWithFailed.uploaded, dagSize1 + dagSize2, 'used storage should not count unpinned')

const secondUsedStorage = await client.getStorageUsed(user._id)
assert.strictEqual(secondUsedStorage.uploaded, dagSize1 + dagSize2, 'used storage with second upload')

Expand Down
109 changes: 77 additions & 32 deletions packages/db/test/utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,54 @@ import { normalizeCid } from '../../api/src/utils/cid.js'
export const token = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJyb2xlIjoicG9zdGdyZXMifQ.oM0SXF31Vs1nfwCaDxjlczE237KcNKhTpKEYxMX-jEU'
export const dbEndpoint = 'http://127.0.0.1:3000'

// Fixture: a single pin that has been requested but is still in 'Pinning'
// status. Content whose only pins look like this is not yet pinned, and per
// this commit must NOT count towards a user's used storage.
export const initialPinsNotPinned = [{
  status: 'Pinning',
  location: {
    peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK6',
    peerName: 'web3-storage-sv15',
    ipfsPeerId: '12D3KooWR19qPPiZH4khepNjS3CLXiB7AbrbAD4ZcDjN1UjGUNE1',
    region: 'region'
  }
}]

// Fixture: two pins on different peers, one still 'Pinning' and one already
// 'Pinned'. Because at least one pin has reached 'Pinned', content with these
// pins DOES count towards a user's used storage.
export const pinsPinned = [
  {
    status: 'Pinning',
    location: {
      peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK6',
      peerName: 'web3-storage-sv15',
      region: 'region'
    }
  },
  {
    status: 'Pinned',
    location: {
      peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK7',
      peerName: 'web3-storage-sv16',
      region: 'region'
    }
  }
]

// Fixture: a failed upload/pin request — every pin ended in 'PinError'.
// Content with only errored pins must NOT count towards a user's used
// storage (the bug this commit fixes).
export const pinsError = [
  {
    status: 'PinError',
    location: {
      peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK6',
      peerName: 'web3-storage-sv15',
      region: 'region'
    }
  },
  {
    status: 'PinError',
    location: {
      peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK7',
      peerName: 'web3-storage-sv16',
      region: 'region'
    }
  }
]

/**
* @param {number} code
* @returns {Promise<string>}
Expand Down Expand Up @@ -37,27 +85,10 @@ export async function createUser (dbClient, options = {}) {
return dbClient.getUser(issuer)
}

const defaultUserPinnedRequests = [
{
status: 'Pinning',
location: {
peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK6',
peerName: 'web3-storage-sv15',
region: 'region'
}
},
{
status: 'Pinned',
location: {
peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK7',
peerName: 'web3-storage-sv16',
region: 'region'
}
}
]

/**
* Create a user and files with a specified storage quota used
* Create a user and files with a specified storage quota used.
*
* It also creates some failed and some not-yet-pinned content, which must not count towards used storage.
* @param {import('../index').DBClient} dbClient
* @param {Object} [options]
* @param {string} [options.email]
Expand Down Expand Up @@ -95,18 +126,42 @@ export async function createUserWithFiles (dbClient, options = {}) {
const pinRequests = 3
const dagSize = Math.ceil(((percentStorageUsed / 100) * storageQuota) / (uploads + pinRequests))

// Create a failed upload.
await createUpload(dbClient, Number(user._id), Number(authKey), await randomCid(), {
dagSize,
pins: pinsError
})

// Create a yet to be pinned upload.
await createUpload(dbClient, Number(user._id), Number(authKey), await randomCid(), {
dagSize,
pins: initialPinsNotPinned
})

for (let i = 0; i < uploads; i++) {
const cid = await randomCid()
await createUpload(dbClient, Number(user._id), Number(authKey), cid, {
dagSize
})
}

// Create a failed PinRequest.
await createPsaPinRequest(dbClient, authKey, await randomCid(), {
dagSize,
pins: pinsError
})

// Create a yet to be pinned PinRequest.
await createPsaPinRequest(dbClient, authKey, await randomCid(), {
dagSize,
pins: initialPinsNotPinned
})

for (let i = 0; i < pinRequests; i++) {
const cid = await randomCid()
await createPsaPinRequest(dbClient, authKey, cid, {
dagSize,
pins: pins || defaultUserPinnedRequests
pins: pins || pinsPinned
})
}

Expand All @@ -130,16 +185,6 @@ export async function createUserAuthKey (dbClient, user, options = {}) {
return _id
}

// Legacy default pin fixture: a single pin still in 'Pinning' status.
// NOTE(review): this appears on the deleted side of this diff — the commit
// removes it in favour of more explicitly named fixtures.
export const defaultPinData = [{
  status: 'Pinning',
  location: {
    peerId: '12D3KooWFe387JFDpgNEVCP5ARut7gRkX7YuJCXMStpkq714ziK6',
    peerName: 'web3-storage-sv15',
    ipfsPeerId: '12D3KooWR19qPPiZH4khepNjS3CLXiB7AbrbAD4ZcDjN1UjGUNE1',
    region: 'region'
  }
}]

/**
* @param {import('../index').DBClient} dbClient
* @param {number} user
Expand All @@ -163,7 +208,7 @@ export async function createUpload (dbClient, user, authKey, cid, options = {})
type: options.type || 'Upload',
dagSize: options.dagSize === undefined ? 1000 : options.dagSize,
name: options.name || `Upload_${new Date().toISOString()}`,
pins: options.pins || defaultPinData,
pins: options.pins || pinsPinned,
backupUrls: options.backupUrls || [initialBackupUrl]
})

Expand Down

0 comments on commit a86d7e2

Please sign in to comment.