This repository has been archived by the owner on Mar 23, 2023. It is now read-only.

Commit

feat: split .query into .query and .queryKeys (#34)
* feat: split .query into .query and .queryKeys

Applies changes from ipfs/interface-datastore/pull/87

Depends on:

- [ ] ipfs/interface-datastore#87
- [ ] ipfs/js-datastore-core#59

* chore: remove gh urls
achingbrain authored Apr 15, 2021
1 parent e86f9e5 commit 29423d1
Showing 2 changed files with 47 additions and 32 deletions.
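
For orientation before the diffs: after this change the datastore exposes two query paths, .query for key/value pairs and .queryKeys for keys only, mirroring interface-datastore v4. The sketch below is illustrative rather than taken from this commit; the bucket name, datastore path, and constructor wiring are assumptions based on the README of that era, and only the query/queryKeys split itself comes from this change.

const S3 = require('aws-sdk').S3
const S3Datastore = require('datastore-s3')

async function main () {
  // Assumed wiring: the bucket name and datastore path are placeholders
  const s3 = new S3({ params: { Bucket: 'my-ipfs-bucket' } })
  const store = new S3Datastore('.ipfs/datastore', { s3 })

  // .query still yields { key, value } pairs, fetching each object from S3
  for await (const { key, value } of store.query({ prefix: '/blocks' })) {
    console.log(key.toString(), value.length)
  }

  // .queryKeys is the new half of the split: it yields Key instances only,
  // so no object bodies are fetched (this replaces query({ keysOnly: true }))
  for await (const key of store.queryKeys({ prefix: '/blocks' })) {
    console.log(key.toString())
  }
}

main().catch(console.error)

The keysOnly option goes away with interface-datastore v4, which is why the keysOnly handling disappears from _all in the src/index.js diff below.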
5 changes: 3 additions & 2 deletions package.json
@@ -36,8 +36,9 @@
   "homepage": "https://github.com/ipfs/js-datastore-s3#readme",
   "dependencies": {
     "buffer": "^6.0.3",
-    "datastore-core": "^3.0.0",
-    "interface-datastore": "^3.0.5"
+    "datastore-core": "^4.0.0",
+    "interface-datastore": "^4.0.0",
+    "it-filter": "^1.0.2"
   },
   "devDependencies": {
     "aegir": "^33.0.0",
74 changes: 44 additions & 30 deletions src/index.js
@@ -1,17 +1,21 @@
 'use strict'
 
 const { Buffer } = require('buffer')
 
+const filter = require('it-filter')
 const {
   Adapter,
   Key,
-  Errors,
-  utils: {
-    filter
-  }
+  Errors
 } = require('interface-datastore')
 const createRepo = require('./s3-repo')
 
+/**
+ * @typedef {import('interface-datastore').Pair} Pair
+ * @typedef {import('interface-datastore').Query} Query
+ * @typedef {import('interface-datastore').KeyQuery} KeyQuery
+ * @typedef {import('interface-datastore').Options} Options
+ */
 
 /**
  * A datastore backed by the file system.
  *
@@ -171,16 +175,21 @@ class S3Datastore extends Adapter {
    * Recursively fetches all keys from s3
    *
    * @param {Object} params
-   * @returns {Iterator<Key>}
+   * @param {Options} [options]
+   * @returns {AsyncIterator<Key>}
    */
-  async * _listKeys (params) {
+  async * _listKeys (params, options) {
     let data
     try {
       data = await this.opts.s3.listObjectsV2(params).promise()
     } catch (err) {
       throw new Error(err.code)
     }
 
+    if (options && options.signal && options.signal.aborted) {
+      return
+    }
+
     for (const d of data.Contents) {
       // Remove the path from the key
       yield new Key(d.Key.slice(this.path.length), false)
@@ -196,31 +205,17 @@
     }
   }
 
+  /**
+   * @param {Query} q
+   * @param {Options} [options]
+   */
   async * _all (q, options) {
-    const prefix = [this.path, q.prefix || ''].join('/').replace(/\/\/+/g, '/')
-
-    let values = true
-    if (q.keysOnly != null) {
-      values = !q.keysOnly
-    }
-
-    // Get all the keys via list object, recursively as needed
-    const params = {
-      Prefix: prefix
-    }
-    let it = this._listKeys(params)
-
-    if (q.prefix != null) {
-      it = filter(it, k => k.toString().startsWith(q.prefix))
-    }
-
-    for await (const key of it) {
+    for await (const key of this._allKeys({ prefix: q.prefix }, options)) {
       try {
-        const res = { key }
-
-        if (values) {
-          // Fetch the object Buffer from s3
-          res.value = await this.get(key)
+        /** @type {Pair} */
+        const res = {
+          key,
+          value: await this.get(key)
         }
 
         yield res
@@ -233,6 +228,25 @@
     }
   }
 
+  /**
+   * @param {KeyQuery} q
+   * @param {Options} [options]
+   */
+  async * _allKeys (q, options) {
+    const prefix = [this.path, q.prefix || ''].join('/').replace(/\/\/+/g, '/')
+
+    // Get all the keys via list object, recursively as needed
+    let it = this._listKeys({
+      Prefix: prefix
+    }, options)
+
+    if (q.prefix != null) {
+      it = filter(it, k => k.toString().startsWith(q.prefix))
+    }
+
+    yield * it
+  }
+
   /**
    * This will check the s3 bucket to ensure access and existence
    *
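A side note on the dependency swap visible above: the filter helper that used to come bundled as interface-datastore's utils.filter is now the standalone it-filter module, which lazily filters any sync or async iterable. That is how the new _allKeys narrows the S3 listing to the requested prefix. A minimal sketch with made-up keys follows; the keysFromS3 generator is hypothetical, standing in for _listKeys.

const filter = require('it-filter')

// Hypothetical stand-in for _listKeys: pretend these keys came back from listObjectsV2
async function * keysFromS3 () {
  yield '/blocks/AAAA'
  yield '/pins/BBBB'
  yield '/blocks/CCCC'
}

async function main () {
  // it-filter wraps the source iterable and only yields items the predicate accepts
  const onlyBlocks = filter(keysFromS3(), k => k.startsWith('/blocks'))

  for await (const key of onlyBlocks) {
    console.log(key) // -> /blocks/AAAA, then /blocks/CCCC
  }
}

main().catch(console.error)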
