Skip to content

Commit

Permalink
chore: api rewire (#524)
Browse files Browse the repository at this point in the history
  • Loading branch information
vasco-santos committed Oct 22, 2021
1 parent cd04716 commit f4f9cd3
Show file tree
Hide file tree
Showing 52 changed files with 3,303 additions and 1,960 deletions.
2,782 changes: 1,584 additions & 1,198 deletions package-lock.json

Large diffs are not rendered by default.

13 changes: 10 additions & 3 deletions packages/api/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,11 @@ One time set up of your cloudflare worker subdomain for dev:
- Setup Cluster
- You need to run a cluster locally and make it accessible from the internet for development.
- Follow the quickstart guide to get an IPFS Cluster up and running: https://cluster.ipfs.io/documentation/quickstart/
- Install [localtunnel](https://theboroer.github.io/localtunnel-www/) and expose the IPFS Cluster HTTP API and IPFS Proxy API (replacing "USER" with your name):
- Install [localtunnel](https://theboroer.github.io/localtunnel-www/) and expose the IPFS Cluster HTTP API (replacing "USER" with your name):

```sh
npm install -g localtunnel
lt --port 9094 --subdomain USER-cluster-api-web3-storage
npm run lt:cluster
```

- There is an npm script you can use to quickly establish these tunnels during development:
Expand All @@ -26,6 +26,13 @@ One time set up of your cloudflare worker subdomain for dev:
npm run lt
```
- Copy your cloudflare account id from `wrangler whoami`
- Setup database
- For setting up a local database check [Local DB setup](../db/postgres/README.md).
  - Once the DB is running, you will need a local tunnel for it, similar to the cluster one:

```sh
npm run lt:postgres
```
- Update `wrangler.toml` with a new `env`. Set your env name to be the value of `whoami` on your system, so that `npm start` runs the worker in dev mode for you.

[**wrangler.toml**](./wrangler.toml)
Expand All @@ -34,7 +41,7 @@ One time set up of your cloudflare worker subdomain for dev:
[env.bobbytables]
workers_dev = true
account_id = "<what does the `wrangler whoami` say>"
vars = { CLUSTER_API_URL = "https://USER-cluster-api-web3-storage.loca.lt", ENV = "dev" }
vars = { CLUSTER_API_URL = "https://USER-cluster-api-web3-storage.loca.lt", PG_REST_URL = "https://USER-postgres-api-web3-storage.loca.lt", ENV = "dev" }
```
- `npm run build` - Build the bundle
Expand Down
9 changes: 7 additions & 2 deletions packages/api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,17 +6,22 @@
"license": "(Apache-2.0 AND MIT)",
"main": "dist/worker.js",
"scripts": {
"lt": "npm run lt:cluster",
"lt": "npm-run-all -p lt:*",
"lt:cluster": "npx localtunnel --port 9094 --subdomain \"$(whoami)-cluster-api-web3-storage\"",
"lt:postgres": "npx localtunnel --port 3000 --subdomain \"$(whoami)-postgres-api-web3-storage\"",
"start": "wrangler dev --env $(whoami)",
"dev": "wrangler dev --env $(whoami)",
"publish": "wrangler publish --env $(whoami)",
"build": "WEBPACK_CLI_FORCE_LOAD_ESM_CONFIG=true webpack",
"test": "npm-run-all -p -r mock:cluster mock:db mock:backup test:e2e -s test:size",
"test": "npm-run-all -s test:postgres test:fauna",
"test:postgres": "npm-run-all -p -r mock:cluster mock:pgrest mock:backup test:e2e -s test:size",
"test:fauna": "npm-run-all -p -r mock:cluster mock:db mock:backup test:e2e:fauna -s test:size",
"test:size": "bundlesize",
"test:e2e": "playwright-test \"test/**/*.spec.js\" --sw src/index.js -b webkit",
"test:e2e:fauna": "playwright-test \"test/**/*.spec.js\" --sw src/index.js -b webkit --config ./pw-test-fauna.config.cjs",
"mock:cluster": "smoke -p 9094 test/mocks/cluster",
"mock:db": "smoke -p 9086 test/mocks/db",
"mock:pgrest": "smoke -p 9087 test/mocks/pgrest",
"mock:backup": "smoke -p 9096 test/mocks/backup"
},
"devDependencies": {
Expand Down
18 changes: 18 additions & 0 deletions packages/api/pw-test-fauna.config.cjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
const path = require('path')

module.exports = {
buildSWConfig: {
inject: [
path.join(__dirname, 'test', 'scripts', 'node-globals.js'),
path.join(__dirname, 'test', 'scripts', 'worker-globals-fauna.js')
],
plugins: [{
name: 'node builtins',
setup (build) {
build.onResolve({ filter: /^stream$/ }, () => {
return { path: require.resolve('stream-browserify') }
})
}
}]
}
}
4 changes: 4 additions & 0 deletions packages/api/pw-test.config.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,10 @@ module.exports = {
build.onResolve({ filter: /^stream$/ }, () => {
return { path: require.resolve('stream-browserify') }
})

build.onResolve({ filter: /^cross-fetch$/ }, () => {
return { path: path.resolve(__dirname, 'src', 'utils', 'fetch.js') }
})
}
}]
}
Expand Down
29 changes: 4 additions & 25 deletions packages/api/src/auth.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { gql } from '@web3-storage/db'
import * as JWT from './utils/jwt.js'
import {
UserNotFoundError,
Expand Down Expand Up @@ -125,32 +124,12 @@ async function tryWeb3ApiToken (token, env) {
return apiToken
}

async function findUserByIssuer (issuer, env) {
const res = await env.db.query(gql`
query FindUserByIssuer ($issuer: String!) {
findUserByIssuer(issuer: $issuer) {
_id
issuer
}
}
`, { issuer })
return res.findUserByIssuer
/**
 * Fetch the user record for the given issuer (DID).
 *
 * @param {string} issuer - issuer DID extracted from the auth token
 * @param {{ db: { getUser: (issuer: string) => any } }} env - request env holding the DB client
 * @returns the result of `db.getUser(issuer)`
 */
function findUserByIssuer (issuer, env) {
  const { db } = env
  return db.getUser(issuer)
}

async function verifyAuthToken (token, decoded, env) {
const res = await env.db.query(gql`
query VerifyAuthToken ($issuer: String!, $secret: String!) {
verifyAuthToken(issuer: $issuer, secret: $secret) {
_id
name
user {
_id
issuer
}
}
}
`, { issuer: decoded.sub, secret: token })
return res.verifyAuthToken
/**
 * Look up the API key matching the token's issuer and secret.
 *
 * @param {string} token - raw token string, used as the key secret
 * @param {{ sub: string }} decoded - decoded JWT; `sub` carries the issuer
 * @param {{ db: { getKey: (issuer: string, secret: string) => any } }} env
 * @returns the result of `db.getKey(decoded.sub, token)`
 */
function verifyAuthToken (token, decoded, env) {
  const { sub } = decoded
  return env.db.getKey(sub, token)
}

function getTokenFromRequest (request, { magic }) {
Expand Down
65 changes: 29 additions & 36 deletions packages/api/src/car.js
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
/* eslint-env serviceworker */
import { gql } from '@web3-storage/db'
import { PutObjectCommand } from '@aws-sdk/client-s3'
import { CarBlockIterator } from '@ipld/car'
import { toString } from 'uint8arrays'
import { Block } from 'multiformats/block'
import { CID } from 'multiformats/cid'
import { sha256 } from 'multiformats/hashes/sha2'
import * as raw from 'multiformats/codecs/raw'
import * as cbor from '@ipld/dag-cbor'
import * as pb from '@ipld/dag-pb'
import retry from 'p-retry'
import { GATEWAY, LOCAL_ADD_THRESHOLD, MAX_BLOCK_SIZE } from './constants.js'
import { ErrorInvalidCid } from './errors.js'
import { JSONResponse } from './utils/json-response.js'
import { toPinStatusEnum } from './utils/pin.js'

Expand All @@ -19,25 +20,6 @@ import { toPinStatusEnum } from './utils/pin.js'

const decoders = [pb, raw, cbor]

const CREATE_UPLOAD = gql`
mutation CreateUpload($data: CreateUploadInput!) {
createUpload(data: $data) {
content {
_id
dagSize
}
}
}
`

const CREATE_OR_UPDATE_PIN = gql`
mutation CreateOrUpdatePin($data: CreateOrUpdatePinInput!) {
createOrUpdatePin(data: $data) {
_id
}
}
`

// Duration between status check polls in ms.
const PIN_STATUS_CHECK_INTERVAL = 5000
// Max time in ms to spend polling for an OK status.
Expand Down Expand Up @@ -158,19 +140,18 @@ export async function handleCarUpload (request, env, ctx, car, uploadType = 'Car
// Retried because it's possible to receive the error:
// "Transaction was aborted due to detection of concurrent modification."
const { createUpload: upload } = await retry(() => (
env.db.query(CREATE_UPLOAD, {
data: {
user: user._id,
authToken: authToken?._id,
cid,
name,
type: uploadType,
backupUrls: backupKey
? [`https://${env.s3BucketName}.s3.${env.s3BucketRegion}.amazonaws.com/${backupKey}`]
: [],
pins,
dagSize
}
env.db.createUpload({
user: user._id,
authKey: authToken?._id,
contentCid: parseCid(cid),
sourceCid: cid,
name,
type: uploadType,
backupUrls: backupKey
? [`https://${env.s3BucketName}.s3.${env.s3BucketRegion}.amazonaws.com/${backupKey}`]
: [],
pins,
dagSize
})
), {
retries: CREATE_UPLOAD_RETRIES,
Expand Down Expand Up @@ -198,9 +179,7 @@ export async function handleCarUpload (request, env, ctx, car, uploadType = 'Car
if (!okPins.length) continue

for (const pin of okPins) {
await env.db.query(CREATE_OR_UPDATE_PIN, {
data: { content: upload.content._id, ...pin }
})
await env.db.upsertPin(upload.content._id, pin)
}
return
}
Expand Down Expand Up @@ -368,3 +347,17 @@ function toPins (peerMap) {
location: { peerId, peerName }
}))
}

/**
 * Parse a CID string and normalize it to its v1 string representation.
 *
 * @param {string} cid - CID string as provided by the client
 * @returns {string} the same CID converted to v1, as a string
 * @throws {ErrorInvalidCid} when the string cannot be parsed as a CID
 */
function parseCid (cid) {
  try {
    return CID.parse(cid).toV1().toString()
  } catch (err) {
    throw new ErrorInvalidCid(cid)
  }
}
19 changes: 15 additions & 4 deletions packages/api/src/env.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
/* global MAGIC_SECRET_KEY FAUNA_ENDPOINT FAUNA_KEY SALT CLUSTER_BASIC_AUTH_TOKEN CLUSTER_API_URL SENTRY_DSN SENTRY_RELEASE DANGEROUSLY_BYPASS_MAGIC_AUTH */
/* global S3_BUCKET_ENDPOINT S3_BUCKET_NAME S3_BUCKET_REGION S3_ACCESS_KEY_ID S3_SECRET_ACCESS_KEY_ID ENV MAINTENANCE_MODE VERSION COMMITHASH BRANCH */
/* global DATABASE PG_REST_URL PG_REST_JWT */
import Toucan from 'toucan-js'
import { S3Client } from '@aws-sdk/client-s3'
import { Magic } from '@magic-sdk/admin'
Expand Down Expand Up @@ -49,10 +50,20 @@ export function envAll (_, env, event) {
env.DANGEROUSLY_BYPASS_MAGIC_AUTH = DANGEROUSLY_BYPASS_MAGIC_AUTH
}

env.db = new DBClient({
endpoint: env.FAUNA_ENDPOINT || (typeof FAUNA_ENDPOINT === 'undefined' ? undefined : FAUNA_ENDPOINT),
token: env.FAUNA_KEY || FAUNA_KEY
})
// Choose the database backend for this request's env.
// Fauna is selected when explicitly requested (per-request env var or the
// global DATABASE binding), or as the default when no DATABASE value is set
// at all; otherwise fall back to the Postgres (PostgREST) client.
// NOTE: fixed `env.DATABSE` typo — the misspelled property was always
// undefined, so an explicitly configured `env.DATABASE === 'fauna'` was
// silently ignored.
if (env.DATABASE === 'fauna' ||
  (typeof DATABASE !== 'undefined' && DATABASE === 'fauna') ||
  (!env.DATABASE && typeof DATABASE === 'undefined')) {
  env.db = new DBClient({
    endpoint: env.FAUNA_ENDPOINT || (typeof FAUNA_ENDPOINT === 'undefined' ? undefined : FAUNA_ENDPOINT),
    token: env.FAUNA_KEY || FAUNA_KEY
  })
} else {
  env.db = new DBClient({
    endpoint: env.PG_REST_URL || (typeof PG_REST_URL === 'undefined' ? undefined : PG_REST_URL),
    token: env.PG_REST_JWT || PG_REST_JWT,
    postgres: true
  })
}

env.SALT = env.SALT || SALT
env.MODE = env.MAINTENANCE_MODE || (typeof MAINTENANCE_MODE === 'undefined' ? DEFAULT_MODE : MAINTENANCE_MODE)
Expand Down
13 changes: 13 additions & 0 deletions packages/api/src/errors.js
Original file line number Diff line number Diff line change
Expand Up @@ -55,3 +55,16 @@ export class MagicTokenRequiredError extends HTTPError {
}
}
MagicTokenRequiredError.CODE = 'ERROR_MAGIC_TOKEN_REQUIRED'

/**
 * Error thrown when a client-supplied CID string fails to parse.
 * Carries an HTTP 400 status and a stable error code for API responses.
 */
export class ErrorInvalidCid extends Error {
  /**
   * @param {string} cid - the invalid CID string, echoed in the message
   */
  constructor (cid) {
    super(`Invalid CID: ${cid}`)
    Object.assign(this, {
      name: 'InvalidCid',
      status: 400,
      code: ErrorInvalidCid.CODE
    })
  }
}
ErrorInvalidCid.CODE = 'ERROR_INVALID_CID'
39 changes: 9 additions & 30 deletions packages/api/src/metrics.js
Original file line number Diff line number Diff line change
@@ -1,28 +1,7 @@
/* global Response caches */

import retry from 'p-retry'
import { gql } from '@web3-storage/db'
import { METRICS_CACHE_MAX_AGE } from './constants.js'

const FIND_METRIC = gql`
query FindMetric($key: String!) {
findMetricByKey(key: $key) {
key
value
updated
}
}
`

/**
* @param {import('@web3-storage/db').DBClient} db
* @param {string} key
*/
async function getMetricValue (db, key) {
const { findMetricByKey } = await retry(() => db.query(FIND_METRIC, { key }))
return findMetricByKey ? findMetricByKey.value : 0
}

/**
* Retrieve metrics in prometheus exposition format.
* https://prometheus.io/docs/instrumenting/exposition_formats/
Expand Down Expand Up @@ -50,15 +29,15 @@ export async function metricsGet (request, env, ctx) {
pinsPinnedTotal,
pinsFailedTotal
] = await Promise.all([
getMetricValue(env.db, 'users_total'),
getMetricValue(env.db, 'uploads_total'),
getMetricValue(env.db, 'content_bytes_total'),
getMetricValue(env.db, 'pins_total'),
getMetricValue(env.db, 'pins_bytes_total'),
getMetricValue(env.db, 'pins_status_queued_total'),
getMetricValue(env.db, 'pins_status_pinning_total'),
getMetricValue(env.db, 'pins_status_pinned_total'),
getMetricValue(env.db, 'pins_status_failed_total')
env.db.getMetricsValue('users_total'),
env.db.getMetricsValue('uploads_total'),
env.db.getMetricsValue('content_bytes_total'),
env.db.getMetricsValue('pins_total'),
env.db.getMetricsValue('pins_bytes_total'),
env.db.getMetricsValue('pins_status_queued_total'),
env.db.getMetricsValue('pins_status_pinning_total'),
env.db.getMetricsValue('pins_status_pinned_total'),
env.db.getMetricsValue('pins_status_failed_total')
])

const metrics = [
Expand Down
Loading

0 comments on commit f4f9cd3

Please sign in to comment.