diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 50d0bab7..0deea63e 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -9,8 +9,8 @@ on: - '**' env: - BEE_VERSION: '1.11.1' - FAIROS_IMAGE: 'fairdatasociety/fairos-dfs:v0.9.6' + BEE_VERSION: '1.13.0' + FAIROS_IMAGE: 'fairdatasociety/fairos-dfs:v0.10.0-rc6' jobs: nodejs: @@ -39,7 +39,7 @@ jobs: run: npm install -g @fairdatasociety/fdp-play - name: Run fdp-play - run: fdp-play start -d --fairos --fairos-image $FAIROS_IMAGE --bee-version $BEE_VERSION + run: fdp-play start -d --bee-version $BEE_VERSION ## Try getting the node modules from cache, if failed npm ci - uses: actions/cache@v3 @@ -48,8 +48,8 @@ jobs: path: node_modules key: ${{ runner.os }}-node-${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }} restore-keys: | - ${{ runner.OS }}-node-${{ matrix.node }}-${{ env.cache-name }}- - ${{ runner.OS }}-node-${{ matrix.node }}- + ${{ runner.os }}-node-${{ matrix.node }}-${{ env.cache-name }}- + ${{ runner.os }}-node-${{ matrix.node }}- - name: Install npm deps if: steps.cache-npm.outputs.cache-hit != 'true' @@ -58,6 +58,51 @@ jobs: - name: Run unit and integration tests for node run: npm run test:node -- --detectOpenHandles + - name: Run migration tests for node + run: npm run test:migration -- --detectOpenHandles + + fairos: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [16.x, 18.x] + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + + - name: Auth to Github Package Docker Registry + run: | + echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://docker.pkg.github.com -u ${GITHUB_ACTOR} --password-stdin + + - name: Install fdp-play + run: npm install -g @fairdatasociety/fdp-play + + - name: Run fdp-play + run: fdp-play start -d --fairos --fairos-image $FAIROS_IMAGE --bee-version $BEE_VERSION + + ## Try getting the node modules from cache, if failed npm ci + - uses: actions/cache@v3 + id: cache-npm + with: + path: node_modules + key: ${{ runner.os }}-node-${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node-${{ matrix.node }}-${{ env.cache-name }}- + ${{ runner.os }}-node-${{ matrix.node }}- + + - name: Install npm deps + if: steps.cache-npm.outputs.cache-hit != 'true' + run: npm ci + - name: Run unit and integration tests for FairOS run: npm run test:fairos -- --detectOpenHandles @@ -96,8 +141,8 @@ jobs: path: node_modules key: ${{ runner.os }}-node-${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }} restore-keys: | - ${{ runner.OS }}-node-${{ matrix.node }}-${{ env.cache-name }}- - ${{ runner.OS }}-node-${{ matrix.node }}- + ${{ runner.os }}-node-${{ matrix.node }}-${{ env.cache-name }}- + ${{ runner.os }}-node-${{ matrix.node }}- - name: Install npm deps if: steps.cache-npm.outputs.cache-hit != 'true' diff --git a/.gitignore b/.gitignore index 5a8a3f14..eedca6fc 100644 --- a/.gitignore +++ b/.gitignore @@ -30,3 +30,5 @@ node_modules docs .vscode + +test/fdp-storage diff --git a/README.md b/README.md index 84d056e7..0c8933b4 100644 --- a/README.md +++ b/README.md @@ -233,7 +233,7 @@ const blocks = [] for (let i = 0; i < blocksCount; i++) { const dataBlock = getDataBlock(data, blockSize, i) // fdp instance with or without logged in user - blocks.push(await fdp.file.uploadDataBlock(dataBlock, i)) + blocks.push(await fdp.file.uploadDataBlock(dataBlock, i, dataBlock.length)) } // fdp instance with logged in user @@ -353,6 +353,18 @@ And to get pod information of a subItem: const podShareInfo = await fdp.personalStorage.openSubscribedPod(subItems[0].subHash, subItems[0].unlockKeyLocation) ``` +## Data migration + +Starting from the version `0.18.0`, pods and directories are stored in different format than the older versions. For all new accounts this doesn't have any impact. But to access pods and folders from existing accounts, migration is required. + +Migration is done transparently, but there are some requirements that users should follow. + +Pod list is converted to V2 when the `fdp.personalStorage.list()` method is invoked. So before working with existing pods, call the `fdp.personalStorage.list()` method first. + +Directories are converted on the fly. Here the same principle applies as for pods. The `fdp.directory.read()` method must be invoked first, before invoking any other operation on the provided directory. The read method will convert not only the provided directory, but also all parent directories. That process happens only once, and all subsequent accesses will work with V2 data instantly. + +Existing files are accessible in the new version. But from the `0.18.0` version, files are compressed before upload, which wasn't the case before. To compress files, they must be reuploaded. + ## Documentation You can generate API docs locally with: diff --git a/jest-puppeteer.config.js b/jest-puppeteer.config.js index b721e2ca..e197e094 100644 --- a/jest-puppeteer.config.js +++ b/jest-puppeteer.config.js @@ -1,5 +1,7 @@ module.exports = { launch: { dumpio: true, // Forwards browser console into test console for easier debugging + headless: 'new', // Opt-in to the new headless mode for Chrome + channel: 'chrome', }, } diff --git a/jest.config.ts b/jest.config.ts index a99c71c5..05a19cea 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -44,6 +44,8 @@ export default async (): Promise => { // An array of directory names to be searched recursively up from the requiring module's location moduleDirectories: ['node_modules'], + testTimeout: 90000, + // Run tests from one or more projects projects: [ { @@ -67,6 +69,11 @@ export default async (): Promise => { testEnvironment: 'node', testRegex: 'test/integration/((?!\\.(browser|fairos)).)*\\.spec\\.ts', }, + { + displayName: 'node:migration', + testEnvironment: 'node', + testRegex: 'test/migration/((?!\\.(browser|fairos)).)*\\.spec\\.ts', + }, ] as unknown[] as string[], // bad types // The root directory that Jest should scan for tests and modules within diff --git a/package-lock.json b/package-lock.json index a607c9b4..5b6bbbd1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,7 +14,8 @@ "crypto-js": "^4.2.0", "elliptic": "^6.5.4", "ethers": "^5.5.2", - "js-sha3": "^0.9.2" + "js-sha3": "^0.9.2", + "pako": "^2.1.0" }, "devDependencies": { "@babel/core": "^7.23.2", @@ -36,6 +37,7 @@ "@types/jest": "^29.5.6", "@types/jest-environment-puppeteer": "^5.0.5", "@types/node": "^20.8.9", + "@types/pako": "^2.0.3", "@types/webpack-bundle-analyzer": "^4.6.2", "@types/ws": "^8.5.8", "@typescript-eslint/eslint-plugin": "^6.9.0", @@ -4850,6 +4852,12 @@ "integrity": "sha512-ehPtgRgaULsFG8x0NeYJvmyH1hmlfsNLujHe9dQEia/7MAJYdzMSi19JtchUHjmBA6XC/75dK55mzZH+RyieSg==", "dev": true }, + "node_modules/@types/pako": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/pako/-/pako-2.0.3.tgz", + "integrity": "sha512-bq0hMV9opAcrmE0Byyo0fY3Ew4tgOevJmQ9grUhpXQhYfyLJ1Kqg3P33JT5fdbT2AjeAjR51zqqVjAL/HMkx7Q==", + "dev": true + }, "node_modules/@types/parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", @@ -12591,6 +12599,11 @@ "node": ">= 14" } }, + "node_modules/pako": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz", + "integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==" + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -18797,6 +18810,12 @@ "integrity": "sha512-ehPtgRgaULsFG8x0NeYJvmyH1hmlfsNLujHe9dQEia/7MAJYdzMSi19JtchUHjmBA6XC/75dK55mzZH+RyieSg==", "dev": true }, + "@types/pako": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/pako/-/pako-2.0.3.tgz", + "integrity": "sha512-bq0hMV9opAcrmE0Byyo0fY3Ew4tgOevJmQ9grUhpXQhYfyLJ1Kqg3P33JT5fdbT2AjeAjR51zqqVjAL/HMkx7Q==", + "dev": true + }, "@types/parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", @@ -24418,6 +24437,11 @@ "netmask": "^2.0.2" } }, + "pako": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz", + "integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==" + }, "parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", diff --git a/package.json b/package.json index 3e4d2ea1..b73dde0b 100644 --- a/package.json +++ b/package.json @@ -43,6 +43,7 @@ "test:unit:browser": "npm run compile:browser && jest --verbose --selectProjects=dom:unit:browser --config=jest.config.ts", "test:integration:node": "jest --verbose --selectProjects=node:integration --config=jest.config.ts", "test:integration": "npm run compile:browser && jest --verbose --selectProjects=node:integration dom:integration --config=jest.config.ts", + "test:migration": "jest --verbose --selectProjects=node:migration --config=jest.config.ts", "test:unit": "jest --verbose --selectProjects=node:unit --config=jest.config.ts ", "test:node": "jest --verbose --selectProjects=node:unit node:integration --config=jest.config.ts", "test:fairos": "jest --verbose --selectProjects=node:fairos --config=jest.config.ts", @@ -58,7 +59,8 @@ "crypto-js": "^4.2.0", "elliptic": "^6.5.4", "ethers": "^5.5.2", - "js-sha3": "^0.9.2" + "js-sha3": "^0.9.2", + "pako": "^2.1.0" }, "devDependencies": { "@babel/core": "^7.23.2", @@ -80,6 +82,7 @@ "@types/jest": "^29.5.6", "@types/jest-environment-puppeteer": "^5.0.5", "@types/node": "^20.8.9", + "@types/pako": "^2.0.3", "@types/webpack-bundle-analyzer": "^4.6.2", "@types/ws": "^8.5.8", "@typescript-eslint/eslint-plugin": "^6.9.0", diff --git a/puppeteer.config.js b/puppeteer.config.js new file mode 100644 index 00000000..85e76c34 --- /dev/null +++ b/puppeteer.config.js @@ -0,0 +1,11 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires +const { join } = require('path') + +/** + * @type {import("puppeteer").Configuration} + */ +module.exports = { + // Changes the cache location for Puppeteer. + cacheDirectory: join(__dirname, '.cache', 'puppeteer-data'), + skipDownload: false, +} diff --git a/src/account/account.ts b/src/account/account.ts index 8cc37f59..a274c4c2 100644 --- a/src/account/account.ts +++ b/src/account/account.ts @@ -1,10 +1,11 @@ -import { Bee, PrivateKeyBytes, Reference, Utils } from '@ethersphere/bee-js' +import { Bee, PrivateKeyBytes, Reference } from '@ethersphere/bee-js' import { utils, Wallet } from 'ethers' import { IV_LENGTH, decryptBytes, encryptBytes } from '../utils/encryption' import { assertChunkSizeLength, CHUNK_SIZE, SEED_SIZE, createCredentialsTopic, HD_PATH } from './utils' import { Connection } from '../connection/connection' import CryptoJS from 'crypto-js' import { wordArrayToBytes } from '../utils/bytes' +import { EthAddress } from '../utils/eth' /** * User account with seed phrase @@ -54,7 +55,7 @@ export async function uploadPortableAccount( */ export async function downloadPortableAccount( bee: Bee, - address: Utils.EthAddress, + address: EthAddress, username: string, password: string, ): Promise { diff --git a/src/account/index.ts b/src/account/index.ts index e62083bb..69065539 100644 --- a/src/account/index.ts +++ b/src/account/index.ts @@ -1,4 +1,3 @@ export * as Account from './account' export * as Encryption from '../utils/encryption' export * as Utils from './utils' -//TODO export every exportable diff --git a/src/content-items/handler.ts b/src/content-items/handler.ts index 4cc4776c..bbfaad81 100644 --- a/src/content-items/handler.ts +++ b/src/content-items/handler.ts @@ -1,18 +1,21 @@ import { Connection } from '../connection/connection' import { utils } from 'ethers' -import { Reference, BeeRequestOptions } from '@ethersphere/bee-js' +import { BeeRequestOptions, Signer } from '@ethersphere/bee-js' import { getUnixTimestamp } from '../utils/time' -import { writeFeedData } from '../feed/api' -import { getRawDirectoryMetadataBytes } from '../directory/adapter' -import { DIRECTORY_TOKEN, FILE_TOKEN } from '../file/handler' -import { assertRawDirectoryMetadata, combine, splitPath } from '../directory/utils' -import { RawDirectoryMetadata } from '../pod/types' -import { deleteFeedData, getPathInfo, getRawMetadata } from './utils' +import { DIRECTORY_TOKEN, FILE_TOKEN, prepareDataByMeta, uploadData } from '../file/handler' +import { assertRawDirectoryMetadata, assertRawFileMetadata, combine, splitPath } from '../directory/utils' +import { FileMetadataWithLookupAnswer, RawDirectoryMetadata } from '../pod/types' +import { deleteFeedData, getIndexFilePath, getPathInfo, getRawMetadata } from './utils' import { RawMetadataWithEpoch } from './types' import { prepareEthAddress } from '../utils/wallet' import { PodPasswordBytes } from '../utils/encryption' -import { DataUploadOptions } from '../file/types' -import { getNextEpoch } from '../feed/lookup/utils' +import { Compression, DataUploadOptions } from '../file/types' +import { rawFileMetadataToFileMetadata } from '../file/adapter' +import { downloadBlocksManifest } from '../file/utils' +import { bytesToString, stringToBytes } from '../utils/bytes' +import { jsonParse } from '../utils/json' +import { AccountData } from '../account/account-data' +import { EthAddress } from '../utils/eth' /** * Minimum block size for uploading data @@ -25,48 +28,65 @@ export const DEFAULT_UPLOAD_OPTIONS: DataUploadOptions = { } /** - * Add child file or directory to a defined parent directory + * Adds an entry to a directory. * - * @param connection connection information for data management - * @param wallet wallet of the pod - * @param podPassword bytes for data encryption from pod metadata - * @param parentPath parent path - * @param entryPath entry path - * @param isFile define if entry is file or directory - * @param downloadOptions download options + * @param {AccountData} accountData - The account data. + * @param {EthAddress} socOwnerAddress - The address of the owner of the Single Owner Chunk. + * @param {(string|Uint8Array|Signer)} socSigner - The signer for the Single Owner Chunk. + * @param {PodPasswordBytes} encryptionPassword - The encryption password. + * @param {string} parentPath - The parent directory path. + * @param {string} entryName - The name of the entry to be added. + * @param {boolean} isFile - Specifies whether the entry is a file (true) or a directory (false). + * @param {BeeRequestOptions} [downloadOptions] - The download options for retrieving metadata. + * + * @returns {Promise} - The metadata of the file with lookup answer. + * + * @throws {Error} If the parent path is incorrect. + * @throws {Error} If the entry name is incorrect. + * @throws {Error} If the parent directory does not exist. + * @throws {Error} If the item already exists in the parent directory list. */ export async function addEntryToDirectory( - connection: Connection, - wallet: utils.HDNode, - podPassword: PodPasswordBytes, + accountData: AccountData, + socOwnerAddress: EthAddress, + socSigner: string | Uint8Array | Signer, + encryptionPassword: PodPasswordBytes, parentPath: string, - entryPath: string, + entryName: string, isFile: boolean, downloadOptions?: BeeRequestOptions, -): Promise { +): Promise { if (!parentPath) { throw new Error('Incorrect parent path') } - if (!entryPath) { - throw new Error('Incorrect entry path') + if (!entryName) { + throw new Error('Incorrect entry name') } - const address = prepareEthAddress(wallet.address) + const bee = accountData.connection.bee const itemText = isFile ? 'File' : 'Directory' - const fullPath = combine(...splitPath(parentPath), entryPath) + const fullPath = combine(...splitPath(parentPath), entryName) let parentData: RawDirectoryMetadata | undefined let metadataWithEpoch: RawMetadataWithEpoch | undefined + const indexFilePath = getIndexFilePath(parentPath) try { - metadataWithEpoch = await getRawMetadata(connection.bee, parentPath, address, podPassword, downloadOptions) - assertRawDirectoryMetadata(metadataWithEpoch.metadata) - parentData = metadataWithEpoch.metadata + metadataWithEpoch = await getRawMetadata(bee, indexFilePath, socOwnerAddress, encryptionPassword, downloadOptions) + assertRawFileMetadata(metadataWithEpoch.metadata) + const fileMeta = rawFileMetadataToFileMetadata(metadataWithEpoch.metadata) + const blocks = await downloadBlocksManifest(bee, fileMeta.blocksReference, downloadOptions) + const directoryMeta = jsonParse( + bytesToString(await prepareDataByMeta(fileMeta, blocks.blocks, bee, downloadOptions)), + 'addEntryToDirectory', + ) + assertRawDirectoryMetadata(directoryMeta) + parentData = directoryMeta } catch (e) { throw new Error('Parent directory does not exist') } - const itemToAdd = (isFile ? FILE_TOKEN : DIRECTORY_TOKEN) + entryPath + const itemToAdd = (isFile ? FILE_TOKEN : DIRECTORY_TOKEN) + entryName parentData.fileOrDirNames = parentData.fileOrDirNames ?? [] if (parentData.fileOrDirNames.includes(itemToAdd)) { @@ -76,13 +96,102 @@ export async function addEntryToDirectory( parentData.fileOrDirNames.push(itemToAdd) parentData.meta.modificationTime = getUnixTimestamp() - return writeFeedData( - connection, - parentPath, - getRawDirectoryMetadataBytes(parentData), - wallet, - podPassword, - getNextEpoch(metadataWithEpoch.epoch), + return uploadData( + accountData.connection, + socOwnerAddress, + socSigner, + encryptionPassword, + indexFilePath, + stringToBytes(JSON.stringify(parentData)), + { + compression: Compression.GZIP, + }, + ) +} + +/** + * Adds multiple entries to a directory. + * + * @param {AccountData} accountData - The account data. + * @param {EthAddress} socOwnerAddress - The address of the owner of the Single Owner Chunk. + * @param {(string|Uint8Array|Signer)} socSigner - The signer for the Single Owner Chunk. + * @param {PodPasswordBytes} encryptionPassword - The encryption password. + * @param {string} parentPath - The parent directory path. + * @param {string} entryNames - An array of entry names to be added. + * @param {boolean} isFile - Specifies whether each entry is a file (true) or a directory (false). + * @param {BeeRequestOptions} [downloadOptions] - The download options for retrieving metadata. + * + * @returns {Promise} - The metadata of the file with lookup answer. + * + * @throws {Error} If the parent path is incorrect. + * @throws {Error} If the entry name is incorrect. + * @throws {Error} If the parent directory does not exist. + * @throws {Error} If the item already exists in the parent directory list. + */ +export async function addEntriesToDirectory( + accountData: AccountData, + socOwnerAddress: EthAddress, + socSigner: string | Uint8Array | Signer, + encryptionPassword: PodPasswordBytes, + parentPath: string, + entryNames: string[], + isFileArray: boolean[], + downloadOptions?: BeeRequestOptions, +): Promise { + if (!parentPath) { + throw new Error('Incorrect parent path') + } + + if (entryNames.some(value => !value)) { + throw new Error('Incorrect entry name') + } + + const bee = accountData.connection.bee + + let parentData: RawDirectoryMetadata + let metadataWithEpoch: RawMetadataWithEpoch | undefined + const indexFilePath = getIndexFilePath(parentPath) + try { + metadataWithEpoch = await getRawMetadata(bee, indexFilePath, socOwnerAddress, encryptionPassword, downloadOptions) + assertRawFileMetadata(metadataWithEpoch.metadata) + const fileMeta = rawFileMetadataToFileMetadata(metadataWithEpoch.metadata) + const blocks = await downloadBlocksManifest(bee, fileMeta.blocksReference, downloadOptions) + const directoryMeta = jsonParse( + bytesToString(await prepareDataByMeta(fileMeta, blocks.blocks, bee, downloadOptions)), + 'addEntryToDirectory', + ) + assertRawDirectoryMetadata(directoryMeta) + parentData = directoryMeta + } catch (e) { + throw new Error('Parent directory does not exist') + } + + entryNames.forEach((entryName, index) => { + const isFile = isFileArray[index] + const itemText = isFile ? 'File' : 'Directory' + const fullPath = combine(...splitPath(parentPath), entryName) + + const itemToAdd = (isFile ? FILE_TOKEN : DIRECTORY_TOKEN) + entryName + parentData.fileOrDirNames = parentData.fileOrDirNames ?? [] + + if (parentData.fileOrDirNames.includes(itemToAdd)) { + throw new Error(`${itemText} "${fullPath}" already listed in the parent directory list`) + } + + parentData.fileOrDirNames.push(itemToAdd) + parentData.meta.modificationTime = getUnixTimestamp() + }) + + return uploadData( + accountData.connection, + socOwnerAddress, + socSigner, + encryptionPassword, + indexFilePath, + stringToBytes(JSON.stringify(parentData)), + { + compression: Compression.GZIP, + }, ) } @@ -129,7 +238,7 @@ export async function deleteItem( * * @param connection connection information for data management * @param wallet wallet of the pod for downloading and uploading metadata - * @param podPassword bytes for data encryption from pod metadata + * @param encryptionPassword bytes for data encryption from pod metadata * @param parentPath parent path of the entry * @param entryPath full path of the entry * @param isFile define if entry is file or directory @@ -138,24 +247,32 @@ export async function deleteItem( export async function removeEntryFromDirectory( connection: Connection, wallet: utils.HDNode, - podPassword: PodPasswordBytes, + encryptionPassword: PodPasswordBytes, parentPath: string, entryPath: string, isFile: boolean, downloadOptions?: BeeRequestOptions, -): Promise { +): Promise { + const indexFilePath = getIndexFilePath(parentPath) const metadataWithEpoch = await getRawMetadata( connection.bee, - parentPath, + indexFilePath, prepareEthAddress(wallet.address), - podPassword, + encryptionPassword, downloadOptions, ) - const parentData = metadataWithEpoch.metadata - assertRawDirectoryMetadata(parentData) + const indexFileMeta = metadataWithEpoch.metadata + assertRawFileMetadata(indexFileMeta) + const fileMeta = rawFileMetadataToFileMetadata(indexFileMeta) + const blocks = await downloadBlocksManifest(connection.bee, fileMeta.blocksReference, downloadOptions) + const directoryMeta = jsonParse( + bytesToString(await prepareDataByMeta(fileMeta, blocks.blocks, connection.bee, downloadOptions)), + 'removeEntryFromDirectory', + ) + assertRawDirectoryMetadata(directoryMeta) + const parentData = directoryMeta const itemToRemove = (isFile ? FILE_TOKEN : DIRECTORY_TOKEN) + entryPath const fileOrDirNames = parentData.fileOrDirNames || [] - const fullPath = combine(parentPath, entryPath) if (!fileOrDirNames.includes(itemToRemove)) { @@ -163,14 +280,17 @@ export async function removeEntryFromDirectory( } parentData.fileOrDirNames = fileOrDirNames.filter(name => name !== itemToRemove) - await deleteItem(connection, fullPath, wallet, podPassword) + await deleteItem(connection, fullPath, wallet, encryptionPassword) - return writeFeedData( + return uploadData( connection, - parentPath, - getRawDirectoryMetadataBytes(parentData), - wallet, - podPassword, - getNextEpoch(metadataWithEpoch.epoch), + prepareEthAddress(wallet.address), + wallet.privateKey, + encryptionPassword, + indexFilePath, + stringToBytes(JSON.stringify(parentData)), + { + compression: Compression.GZIP, + }, ) } diff --git a/src/content-items/utils.ts b/src/content-items/utils.ts index 1a4a7ccc..bf85bf90 100644 --- a/src/content-items/utils.ts +++ b/src/content-items/utils.ts @@ -1,8 +1,7 @@ -import { Bee, Reference, BeeRequestOptions } from '@ethersphere/bee-js' -import { EthAddress } from '@ethersphere/bee-js/dist/types/utils/eth' +import { Bee, Reference, BeeRequestOptions, Data } from '@ethersphere/bee-js' import { RawDirectoryMetadata, RawFileMetadata } from '../pod/types' import { DELETE_FEED_MAGIC_WORD, getFeedData, writeFeedData } from '../feed/api' -import { isRawDirectoryMetadata, isRawFileMetadata } from '../directory/utils' +import { combine, isRawDirectoryMetadata, isRawFileMetadata, splitPath } from '../directory/utils' import { DirectoryItem, FileItem, PathInformation, RawMetadataWithEpoch } from './types' import { decryptJson, PodPasswordBytes } from '../utils/encryption' import CryptoJS from 'crypto-js' @@ -11,6 +10,34 @@ import { Connection } from '../connection/connection' import { utils, Wallet } from 'ethers' import { Epoch } from '../feed/lookup/epoch' import { stringToBytes } from '../utils/bytes' +import { INDEX_ITEM_NAME } from '../file/handler' +import { EthAddress } from '../utils/eth' + +/** + * Extracts metadata from encrypted source data using a pod password + * + * @param {Data} sourceData - The encrypted source data from which to extract metadata. + * @param {PodPasswordBytes} podPassword - The pod password used to decrypt the source data. + * @throws {Error} If the metadata is invalid. + * @returns {RawDirectoryMetadata | RawFileMetadata} The extracted metadata. + */ +export function extractMetadata( + sourceData: Data, + podPassword: PodPasswordBytes, +): RawDirectoryMetadata | RawFileMetadata { + const data = decryptJson(podPassword, sourceData) + let metadata + + if (isRawDirectoryMetadata(data)) { + metadata = data as RawDirectoryMetadata + } else if (isRawFileMetadata(data)) { + metadata = data as RawFileMetadata + } else { + throw new Error('Invalid metadata') + } + + return metadata +} /** * Get raw metadata by path @@ -29,20 +56,10 @@ export async function getRawMetadata( requestOptions?: BeeRequestOptions, ): Promise { const feedData = await getFeedData(bee, path, address, requestOptions) - const data = decryptJson(podPassword, feedData.data.chunkContent()) - let metadata - - if (isRawDirectoryMetadata(data)) { - metadata = data as RawDirectoryMetadata - } else if (isRawFileMetadata(data)) { - metadata = data as RawFileMetadata - } else { - throw new Error('Invalid metadata') - } return { epoch: feedData.epoch, - metadata, + metadata: extractMetadata(feedData.data.chunkContent(), podPassword), } } @@ -141,5 +158,15 @@ export async function deleteFeedData( podPassword: PodPasswordBytes, epoch?: Epoch, ): Promise { - return writeFeedData(connection, topic, stringToBytes(DELETE_FEED_MAGIC_WORD), wallet, podPassword, epoch) + return writeFeedData(connection, topic, stringToBytes(DELETE_FEED_MAGIC_WORD), wallet.privateKey, podPassword, epoch) +} + +/** + * Returns the index file path for a given path. + * + * @param {string} path - The path for which the index file path is needed. + * @return {string} - The index file path for the given path. + */ +export function getIndexFilePath(path: string): string { + return combine(...splitPath(path), INDEX_ITEM_NAME) } diff --git a/src/directory/directory.ts b/src/directory/directory.ts index ff2cb8b3..555c5f32 100644 --- a/src/directory/directory.ts +++ b/src/directory/directory.ts @@ -1,7 +1,7 @@ import { AccountData } from '../account/account-data' import { createDirectory, readDirectory, DEFAULT_UPLOAD_DIRECTORY_OPTIONS, UploadDirectoryOptions } from './handler' import { assertAccount } from '../account/utils' -import { removeEntryFromDirectory } from '../content-items/handler' +import { addEntryToDirectory, removeEntryFromDirectory } from '../content-items/handler' import { extractPathInfo, readBrowserFileAsBytes } from '../file/utils' import { assertPodName, getExtendedPodsListByAccountData } from '../pod/utils' import { isNode } from '../shim/utils' @@ -16,10 +16,12 @@ import { getUploadPath, BrowserFileInfo, NodeFileInfo, + migrateDirectoryV1ToV2, } from './utils' import { uploadData } from '../file/handler' import { assertNodeFileInfo, isBrowserFileInfo } from './types' import { DirectoryItem } from '../content-items/types' +import { prepareEthAddress } from '../utils/wallet' /** * Directory related class @@ -39,16 +41,62 @@ export class Directory { async read(podName: string, path: string, isRecursive?: boolean): Promise { assertAccount(this.accountData) assertPodName(podName) - const { podAddress, pod } = await getExtendedPodsListByAccountData(this.accountData, podName) - return readDirectory( - this.accountData.connection.bee, - path, - podAddress, - pod.password, - isRecursive, - this.accountData.connection.options?.requestOptions, - ) + const { podAddress, pod, podWallet } = await getExtendedPodsListByAccountData(this.accountData, podName) + + try { + return await readDirectory( + this.accountData, + podName, + path, + podAddress, + pod.password, + isRecursive, + this.accountData.connection.options?.requestOptions, + ) + } catch (error) { + if (!String(error).includes('Index content file not found')) { + throw error + } + + const folders = path.split('/').filter(part => part) + + for (let i = 0; i < folders.length; i++) { + const currentPath = `/${i === 0 ? '' : folders.slice(0, i).join('/')}` + + try { + await readDirectory( + this.accountData, + podName, + currentPath, + podAddress, + pod.password, + false, + this.accountData.connection.options?.requestOptions, + ) + } catch (error) { + await migrateDirectoryV1ToV2( + this.accountData, + currentPath, + podAddress, + pod.password, + podWallet, + false, + this.accountData.connection.options?.requestOptions, + ) + } + } + + return migrateDirectoryV1ToV2( + this.accountData, + path, + podAddress, + pod.password, + podWallet, + isRecursive, + this.accountData.connection.options?.requestOptions, + ) + } } /** @@ -65,7 +113,7 @@ export class Directory { const { podWallet, pod } = await getExtendedPodsListByAccountData(this.accountData, podName) return createDirectory( - this.accountData.connection, + this.accountData, fullPath, podWallet, pod.password, @@ -143,7 +191,7 @@ export class Directory { for (const directory of directoriesToCreate) { try { await createDirectory( - this.accountData.connection, + this.accountData, directory, podWallet, pod.password, @@ -171,7 +219,26 @@ export class Directory { } const uploadPath = getUploadPath(file, options.isIncludeDirectoryName!) - await uploadData(podName, uploadPath, bytes, this.accountData, options.uploadOptions!) + const socOwnerAddress = prepareEthAddress(podWallet.address) + await uploadData( + this.accountData.connection, + socOwnerAddress, + podWallet.privateKey, + pod.password, + uploadPath, + bytes, + options.uploadOptions!, + ) + const pathInfo = extractPathInfo(uploadPath) + await addEntryToDirectory( + this.accountData, + socOwnerAddress, + podWallet.privateKey, + pod.password, + pathInfo.path, + pathInfo.filename, + true, + ) } } } diff --git a/src/directory/handler.ts b/src/directory/handler.ts index dc9aa152..da1f711a 100644 --- a/src/directory/handler.ts +++ b/src/directory/handler.ts @@ -1,6 +1,4 @@ -import { writeFeedData } from '../feed/api' -import { EthAddress } from '@ethersphere/bee-js/dist/types/utils/eth' -import { Bee, Reference, BeeRequestOptions } from '@ethersphere/bee-js' +import { BeeRequestOptions, Reference, Signer } from '@ethersphere/bee-js' import { assertDirectoryName, assertPartsLength, @@ -11,24 +9,29 @@ import { getPathParts, splitPath, } from './utils' -import { DIRECTORY_TOKEN, FILE_TOKEN } from '../file/handler' +import { DIRECTORY_TOKEN, FILE_TOKEN, getIndexFileContent, uploadData } from '../file/handler' import { getUnixTimestamp } from '../utils/time' import { createRawDirectoryMetadata, META_VERSION } from '../pod/utils' import { Connection } from '../connection/connection' import { utils } from 'ethers' import { addEntryToDirectory, DEFAULT_UPLOAD_OPTIONS } from '../content-items/handler' import { + getCreationPathInfo, + getIndexFilePath, + getRawMetadata, rawDirectoryMetadataToDirectoryItem, rawFileMetadataToFileItem, - getRawMetadata, - getCreationPathInfo, } from '../content-items/utils' import { PodPasswordBytes } from '../utils/encryption' -import { DataUploadOptions } from '../file/types' +import { Compression, DataUploadOptions } from '../file/types' import { DirectoryItem } from '../content-items/types' import { prepareEthAddress } from '../utils/wallet' +import { AccountData } from '../account/account-data' +import { FileMetadataWithLookupAnswer, RawDirectoryMetadata } from '../pod/types' +import { writeFeedData } from '../feed/api' import { Epoch } from '../feed/lookup/epoch' import { getNextEpoch } from '../feed/lookup/utils' +import { EthAddress } from '../utils/eth' /** * Options for uploading a directory @@ -56,7 +59,8 @@ export const DEFAULT_UPLOAD_DIRECTORY_OPTIONS: UploadDirectoryOptions = { /** * Get files and directories under path with recursion or not * - * @param bee Bee instance + * @param {AccountData} accountData AccountData instace + * @param podName pod name * @param path path to start searching from * @param address Ethereum address of the pod which owns the path * @param podPassword bytes for data encryption from pod metadata @@ -64,15 +68,26 @@ export const DEFAULT_UPLOAD_DIRECTORY_OPTIONS: UploadDirectoryOptions = { * @param downloadOptions options for downloading */ export async function readDirectory( - bee: Bee, + accountData: AccountData, + podName: string, path: string, address: EthAddress, podPassword: PodPasswordBytes, isRecursive?: boolean, downloadOptions?: BeeRequestOptions, ): Promise { - const parentRawDirectoryMetadata = (await getRawMetadata(bee, path, address, podPassword, downloadOptions)).metadata - assertRawDirectoryMetadata(parentRawDirectoryMetadata) + let parentRawDirectoryMetadata: RawDirectoryMetadata | undefined + try { + parentRawDirectoryMetadata = await getIndexFileContent(accountData, podName, path, downloadOptions) + } catch (e) { + // it means the data is not available in V2 OR it is V1 + } + + if (!parentRawDirectoryMetadata) { + throw new Error('Index content file not found') + } + + const bee = accountData.connection.bee const resultDirectoryItem = rawDirectoryMetadataToDirectoryItem(parentRawDirectoryMetadata) if (!parentRawDirectoryMetadata.fileOrDirNames) { @@ -90,17 +105,29 @@ export async function readDirectory( resultDirectoryItem.files.push(rawFileMetadataToFileItem(data)) } else if (isDirectory) { item = combine(...splitPath(path), item.substring(DIRECTORY_TOKEN.length)) - const data = (await getRawMetadata(bee, item, address, podPassword, downloadOptions)).metadata - assertRawDirectoryMetadata(data) - const currentMetadata = rawDirectoryMetadataToDirectoryItem(data) + try { + const data = await getIndexFileContent(accountData, podName, item, downloadOptions) + assertRawDirectoryMetadata(data) + const currentMetadata = rawDirectoryMetadataToDirectoryItem(data) + resultDirectoryItem.directories.push(currentMetadata) + const allItems = data.fileOrDirNames ? data.fileOrDirNames : [] - if (isRecursive) { - const content = await readDirectory(bee, item, address, podPassword, isRecursive, downloadOptions) - currentMetadata.files = content.files - currentMetadata.directories = content.directories + if (isRecursive && allItems.length > 0) { + const content = await readDirectory( + accountData, + podName, + item, + address, + podPassword, + isRecursive, + downloadOptions, + ) + currentMetadata.files = content.files + currentMetadata.directories = content.directories + } + } catch (e) { + /* empty */ } - - resultDirectoryItem.directories.push(currentMetadata) } } @@ -111,56 +138,103 @@ export async function readDirectory( * Creates directory metadata for a given directory path and upload it to the network * * @param connection Bee connection + * @param socSigner feed owner's signer * @param path parent path * @param name name of the directory - * @param podPassword bytes for data encryption from pod metadata - * @param wallet feed owner's wallet - * @param epoch epoch where directory info should be uploaded + * @param encryptionPassword bytes for data encryption from pod metadata + * @param epoch feed epoch */ async function createDirectoryInfo( connection: Connection, + socSigner: string | Uint8Array | Signer, path: string, name: string, - podPassword: PodPasswordBytes, - wallet: utils.HDNode, + encryptionPassword: PodPasswordBytes, epoch?: Epoch, ): Promise { const now = getUnixTimestamp() const metadata = createRawDirectoryMetadata(META_VERSION, path, name, now, now, now) + const fullPath = combine(...splitPath(path), name) - return writeFeedData(connection, combine(...splitPath(path), name), metadata, wallet, podPassword, epoch) + return writeFeedData(connection, fullPath, metadata, socSigner, encryptionPassword, epoch) } /** * Creates root directory for the pod that tied to the private key * * @param connection Bee connection - * @param podPassword bytes for data encryption + * @param encryptionPassword data encryption password * @param wallet feed owner's wallet */ export async function createRootDirectory( connection: Connection, - podPassword: PodPasswordBytes, + encryptionPassword: PodPasswordBytes, wallet: utils.HDNode, -): Promise { - return createDirectoryInfo(connection, '', '/', podPassword, wallet) +): Promise { + return createDirectoryIndexFile(connection, '', '/', encryptionPassword, wallet) +} + +/** + * Creates a directory index file and uploads it to the specified path. + * + * @param {Connection} connection - The connection object. + * @param {string} path - The path of the directory where the index file will be created. + * @param {string} name - The name of the directory to be created. + * @param {PodPasswordBytes} encryptionPassword - The encryption password for the data. + * @param {utils.HDNode} wallet - The wallet object used for signing the requests. + * + * @returns {Promise} - A promise that resolves to the uploaded file's metadata and lookup answer. + */ +export async function createDirectoryIndexFile( + connection: Connection, + path: string, + name: string, + encryptionPassword: PodPasswordBytes, + wallet: utils.HDNode, +): Promise { + const fullPath = combine(...splitPath(path), name) + const socOwnerAddress = prepareEthAddress(wallet.address) + const socSigner = wallet.privateKey + const pathInfo = await getCreationPathInfo( + connection.bee, + fullPath, + socOwnerAddress, + connection.options?.requestOptions, + ) + await createDirectoryInfo( + connection, + socSigner, + path, + name, + encryptionPassword, + getNextEpoch(pathInfo?.lookupAnswer.epoch), + ) + const now = getUnixTimestamp() + const data = createRawDirectoryMetadata(META_VERSION, path, name, now, now, now) + const indexFilePath = getIndexFilePath(fullPath) + + return uploadData(connection, socOwnerAddress, socSigner, encryptionPassword, indexFilePath, data, { + compression: Compression.GZIP, + }) } /** * Creates directory under the pod * - * @param connection Bee connection + * @param {AccountData} accountData account data * @param fullPath path to the directory * @param podWallet pod wallet * @param podPassword bytes for decrypting pod content * @param downloadOptions options for downloading + * @param updateParentDirectory used for migration to V2 only */ export async function createDirectory( - connection: Connection, + accountData: AccountData, fullPath: string, podWallet: utils.HDNode, podPassword: PodPasswordBytes, downloadOptions?: BeeRequestOptions, + updateParentDirectory = true, ): Promise { const parts = getPathParts(fullPath) assertPartsLength(parts) @@ -168,19 +242,37 @@ export async function createDirectory( assertDirectoryName(name) const parentPath = getPathFromParts(parts, 1) + const podAddress = prepareEthAddress(podWallet.address) + // get epoch data in case of the directory previously existed to write on the correct level const pathInfo = await getCreationPathInfo( - connection.bee, + accountData.connection.bee, fullPath, - prepareEthAddress(podWallet.address), - connection.options?.requestOptions, + podAddress, + accountData.connection.options?.requestOptions, ) - await addEntryToDirectory(connection, podWallet, podPassword, parentPath, name, false, downloadOptions) + + if (updateParentDirectory) { + await addEntryToDirectory( + accountData, + podAddress, + podWallet.privateKey, + podPassword, + parentPath, + name, + false, + downloadOptions, + ) + } + await createDirectoryInfo( - connection, - parentPath, + accountData.connection, + podWallet.privateKey, + fullPath, name, podPassword, - podWallet, + // pass calculated epoch to write on correct level in case of the directory previously existed getNextEpoch(pathInfo?.lookupAnswer.epoch), ) + + await createDirectoryIndexFile(accountData.connection, parentPath, name, podPassword, podWallet) } diff --git a/src/directory/utils.ts b/src/directory/utils.ts index 0b24be6f..668eb293 100644 --- a/src/directory/utils.ts +++ b/src/directory/utils.ts @@ -1,4 +1,4 @@ -import { MAX_DIRECTORY_NAME_LENGTH } from './handler' +import { MAX_DIRECTORY_NAME_LENGTH, createDirectory, createRootDirectory } from './handler' import { RawDirectoryMetadata, RawFileMetadata } from '../pod/types' import { assertString, isNumber, isString } from '../utils/type' import { replaceAll } from '../utils/string' @@ -6,6 +6,16 @@ import * as fs from 'fs' import * as nodePath from 'path' import { isNode } from '../shim/utils' import { getBaseName } from '../file/utils' +import { Bee, BeeRequestOptions } from '@ethersphere/bee-js' +import { getRawMetadata, rawDirectoryMetadataToDirectoryItem, rawFileMetadataToFileItem } from '../content-items/utils' +import { EthAddress } from '../utils/eth' +import { PodPasswordBytes } from '../utils/encryption' +import { DirectoryItem } from '../types' +import { DIRECTORY_TOKEN, FILE_TOKEN } from '../file/handler' +import { AccountData } from '../account/account-data' +import { HDNode } from 'ethers/lib/utils' +import { prepareEthAddress } from '../utils/wallet' +import { addEntriesToDirectory } from '../content-items/handler' /** * Default directory permission in octal format @@ -366,3 +376,91 @@ export function getUploadPath(fileInfo: FileInfo, isIncludeDirectoryName: boolea export function getDirectoryMode(mode: number): number { return DIRECTORY_MODE | mode } + +/** + * Depricated, used for migration only + * Get files and directories under path with recursion or not + * + * @param bee Bee instance + * @param path path to start searching from + * @param address Ethereum address of the pod which owns the path + * @param podPassword bytes for data encryption from pod metadata + * @param isRecursive search with recursion or not + * @param downloadOptions options for downloading + */ +export async function readDirectoryV1( + bee: Bee, + path: string, + address: EthAddress, + podPassword: PodPasswordBytes, + isRecursive?: boolean, + downloadOptions?: BeeRequestOptions, +): Promise { + const parentRawDirectoryMetadata = (await getRawMetadata(bee, path, address, podPassword, downloadOptions)).metadata + assertRawDirectoryMetadata(parentRawDirectoryMetadata) + const resultDirectoryItem = rawDirectoryMetadataToDirectoryItem(parentRawDirectoryMetadata) + + if (!parentRawDirectoryMetadata.fileOrDirNames) { + return resultDirectoryItem + } + + for (let item of parentRawDirectoryMetadata.fileOrDirNames) { + const isFile = item.startsWith(FILE_TOKEN) + const isDirectory = item.startsWith(DIRECTORY_TOKEN) + + if (isFile) { + item = combine(...splitPath(path), item.substring(FILE_TOKEN.length)) + const data = (await getRawMetadata(bee, item, address, podPassword, downloadOptions)).metadata + assertRawFileMetadata(data) + resultDirectoryItem.files.push(rawFileMetadataToFileItem(data)) + } else if (isDirectory) { + item = combine(...splitPath(path), item.substring(DIRECTORY_TOKEN.length)) + const data = (await getRawMetadata(bee, item, address, podPassword, downloadOptions)).metadata + assertRawDirectoryMetadata(data) + const currentMetadata = rawDirectoryMetadataToDirectoryItem(data) + + if (isRecursive) { + const content = await readDirectoryV1(bee, item, address, podPassword, isRecursive, downloadOptions) + currentMetadata.files = content.files + currentMetadata.directories = content.directories + } + + resultDirectoryItem.directories.push(currentMetadata) + } + } + + return resultDirectoryItem +} + +export async function migrateDirectoryV1ToV2( + accountData: AccountData, + path: string, + address: EthAddress, + podPassword: PodPasswordBytes, + podWallet: HDNode, + isRecursive?: boolean, + downloadOptions?: BeeRequestOptions, +): Promise { + const directoryItem = await readDirectoryV1( + accountData.connection.bee, + path, + address, + podPassword, + isRecursive, + downloadOptions, + ) + + if (path === '/') { + await createRootDirectory(accountData.connection, podPassword, podWallet) + } else { + await createDirectory(accountData, path, podWallet, podPassword, downloadOptions, false) + } + + const socOwnerAddress = prepareEthAddress(podWallet.address) + const entries = directoryItem.directories.map(({ name }) => name).concat(directoryItem.files.map(({ name }) => name)) + const isFile = directoryItem.directories.map(() => false).concat(directoryItem.files.map(() => true)) + + await addEntriesToDirectory(accountData, socOwnerAddress, podWallet.privateKey, podPassword, path, entries, isFile) + + return directoryItem +} diff --git a/src/feed/api.ts b/src/feed/api.ts index 743faaa7..aa52f5e0 100644 --- a/src/feed/api.ts +++ b/src/feed/api.ts @@ -1,14 +1,14 @@ -import { Bee, Data, Reference, BeeRequestOptions, Utils } from '@ethersphere/bee-js' +import { Bee, Data, Reference, BeeRequestOptions, Utils, Signer } from '@ethersphere/bee-js' import { bmtHashString } from '../account/utils' import { getId } from './handler' import { lookup } from './lookup/linear' -import { Epoch, HIGHEST_LEVEL } from './lookup/epoch' +import { Epoch, getFirstEpoch } from './lookup/epoch' import { bytesToHex } from '../utils/hex' import { getUnixTimestamp } from '../utils/time' import { LookupAnswer } from './types' import { Connection } from '../connection/connection' import { encryptBytes, PodPasswordBytes } from '../utils/encryption' -import { utils, Wallet } from 'ethers' +import { EthAddress } from '../utils/eth' /** * Magic word for replacing content after deletion @@ -26,7 +26,7 @@ export const DELETE_FEED_MAGIC_WORD = '__Fair__' export async function getFeedData( bee: Bee, topic: string, - address: Utils.EthAddress | Uint8Array, + address: EthAddress | Uint8Array, requestOptions?: BeeRequestOptions, ): Promise { const topicHash = bmtHashString(topic) @@ -45,26 +45,34 @@ export async function getFeedData( * @param connection connection information for data uploading * @param topic key for data * @param data data to upload - * @param wallet feed owner's wallet - * @param podPassword bytes for data encryption from pod metadata + * @param socSigner feed owner's signer + * @param encryptionPassword data encryption password * @param epoch feed epoch */ export async function writeFeedData( connection: Connection, topic: string, data: Uint8Array, - wallet: utils.HDNode | Wallet, - podPassword: PodPasswordBytes, + socSigner: string | Uint8Array | Signer, + encryptionPassword: PodPasswordBytes, epoch?: Epoch, ): Promise { - if (!epoch) { - epoch = new Epoch(HIGHEST_LEVEL, getUnixTimestamp()) - } - data = encryptBytes(podPassword, data) - + epoch = prepareEpoch(epoch) + data = encryptBytes(encryptionPassword, data) const topicHash = bmtHashString(topic) const id = getId(topicHash, epoch.time, epoch.level) - const socWriter = connection.bee.makeSOCWriter(wallet.privateKey) + const socWriter = connection.bee.makeSOCWriter(socSigner) return socWriter.upload(connection.postageBatchId, id, data) } + +/** + * Prepares an epoch for further processing. + * + * @param {Epoch} [epoch] - The epoch to prepare. If not provided, a new epoch will be created. + * + * @return {Epoch} The prepared epoch. + */ +export function prepareEpoch(epoch?: Epoch): Epoch { + return epoch ?? getFirstEpoch(getUnixTimestamp()) +} diff --git a/src/file/file.ts b/src/file/file.ts index 0b72cb81..5c6b4b9f 100644 --- a/src/file/file.ts +++ b/src/file/file.ts @@ -28,6 +28,7 @@ import { getRawMetadata } from '../content-items/utils' import { assertRawFileMetadata, combine, splitPath } from '../directory/utils' import { assertEncryptedReference, EncryptedReference } from '../utils/hex' import { assertBatchId } from '../utils/string' +import { prepareEthAddress } from '../utils/wallet' /** * Files management class @@ -78,7 +79,31 @@ export class File { assertAccount(this.accountData, { writeRequired: true }) assertPodName(podName) - return uploadData(podName, fullPath, data, this.accountData, options) + const { podWallet, pod } = await getExtendedPodsListByAccountData(this.accountData, podName) + const socOwnerAddress = prepareEthAddress(podWallet.address) + const meta = ( + await uploadData( + this.accountData.connection, + socOwnerAddress, + podWallet.privateKey, + pod.password, + fullPath, + data, + options, + ) + ).meta + const pathInfo = extractPathInfo(fullPath) + await addEntryToDirectory( + this.accountData, + socOwnerAddress, + podWallet.privateKey, + pod.password, + pathInfo.path, + pathInfo.filename, + true, + ) + + return meta } /** @@ -165,12 +190,21 @@ export class File { const sharedInfo = await this.getSharedInfo(reference) const connection = this.accountData.connection const { podWallet, pod } = await getExtendedPodsListByAccountData(this.accountData, podName) + const socOwnerAddress = prepareEthAddress(podWallet.address) let meta = rawFileMetadataToFileMetadata(sharedInfo.meta) const fileName = options?.name ?? sharedInfo.meta.fileName meta = updateFileMetadata(meta, parentPath, fileName) const fullPath = combine(...splitPath(parentPath), fileName) - await addEntryToDirectory(connection, podWallet, pod.password, parentPath, fileName, true) - await writeFeedData(connection, fullPath, getFileMetadataRawBytes(meta), podWallet, pod.password) + await addEntryToDirectory( + this.accountData, + socOwnerAddress, + podWallet.privateKey, + pod.password, + parentPath, + fileName, + true, + ) + await writeFeedData(connection, fullPath, getFileMetadataRawBytes(meta), podWallet.privateKey, pod.password) return meta } @@ -182,12 +216,13 @@ export class File { * * @param block block data * @param blockIndex block index + * @param originalSize original size of the block */ - async uploadDataBlock(block: Uint8Array, blockIndex: number): Promise { + async uploadDataBlock(block: Uint8Array, blockIndex: number, originalSize: number): Promise { assertBatchId(this.accountData.connection.postageBatchId) return { - ...(await uploadDataBlock(this.accountData.connection, block)), + ...(await uploadDataBlock(this.accountData.connection, block, originalSize)), index: blockIndex, } } diff --git a/src/file/handler.ts b/src/file/handler.ts index eb55d6d3..4a81bbdc 100644 --- a/src/file/handler.ts +++ b/src/file/handler.ts @@ -1,8 +1,6 @@ import { assertMinLength, stringToBytes, wrapBytesWithHelpers } from '../utils/bytes' -import { Bee, Data, BeeRequestOptions } from '@ethersphere/bee-js' -import { EthAddress } from '@ethersphere/bee-js/dist/types/utils/eth' +import { Bee, BeeRequestOptions, Data, Signer } from '@ethersphere/bee-js' import { - assertFullPathWithName, assertSequenceOfExternalDataBlocksCorrect, calcUploadBlockPercentage, DEFAULT_FILE_PERMISSIONS, @@ -16,14 +14,15 @@ import { updateUploadProgress, uploadBytes, } from './utils' -import { FileMetadata } from '../pod/types' +import { FileMetadata, FileMetadataWithLookupAnswer, RawDirectoryMetadata } from '../pod/types' import { blocksToManifest, getFileMetadataRawBytes, rawFileMetadataToFileMetadata } from './adapter' -import { assertRawFileMetadata } from '../directory/utils' -import { getCreationPathInfo, getRawMetadata } from '../content-items/utils' +import { assertRawDirectoryMetadata, assertRawFileMetadata } from '../directory/utils' +import { getCreationPathInfo, getIndexFilePath, getRawMetadata } from '../content-items/utils' import { PodPasswordBytes } from '../utils/encryption' import { Block, Blocks, + Compression, DataDownloadOptions, DataUploadOptions, DownloadProgressType, @@ -31,15 +30,17 @@ import { FileMetadataWithBlocks, UploadProgressType, } from './types' -import { assertPodName, getExtendedPodsListByAccountData, META_VERSION } from '../pod/utils' +import { getExtendedPodsListByAccountData, META_VERSION } from '../pod/utils' import { getUnixTimestamp } from '../utils/time' -import { addEntryToDirectory, DEFAULT_UPLOAD_OPTIONS, MINIMUM_BLOCK_SIZE } from '../content-items/handler' -import { writeFeedData } from '../feed/api' +import { DEFAULT_UPLOAD_OPTIONS, MINIMUM_BLOCK_SIZE } from '../content-items/handler' +import { prepareEpoch, writeFeedData } from '../feed/api' import { AccountData } from '../account/account-data' -import { prepareEthAddress } from '../utils/wallet' -import { assertWallet } from '../utils/type' import { getNextEpoch } from '../feed/lookup/utils' import { Connection } from '../connection/connection' +import { compress, decompress } from '../utils/compression' +import { wrapChunkHelper } from '../feed/utils' +import { jsonParse } from '../utils/json' +import { EthAddress } from '../utils/eth' /** * File prefix @@ -50,6 +51,12 @@ export const FILE_TOKEN = '_F_' */ export const DIRECTORY_TOKEN = '_D_' +/** + * Index item for storing data with files and directories items + * https://github.com/fairDataSociety/FIPs/pull/75 + */ +export const INDEX_ITEM_NAME = 'index.dfs' + /** * Get converted metadata by path * @@ -95,14 +102,9 @@ export async function getFileMetadataWithBlocks( const { podAddress, pod } = await getExtendedPodsListByAccountData(accountData, podName) updateDownloadProgress(dataDownloadOptions, DownloadProgressType.GetPathInfo) const fileMetadata = await getFileMetadata(bee, fullPath, podAddress, pod.password, downloadOptions) - - if (fileMetadata.compression) { - // TODO: implement compression support - throw new Error('Compressed data is not supported yet') - } - updateDownloadProgress(dataDownloadOptions, DownloadProgressType.DownloadBlocksMeta) const blocks = await downloadBlocksManifest(bee, fileMetadata.blocksReference, downloadOptions) + await prepareDataByMeta(fileMetadata, blocks.blocks, accountData.connection.bee, downloadOptions) return { ...fileMetadata, @@ -111,32 +113,23 @@ export async function getFileMetadataWithBlocks( } /** - * Downloads file parts and compile them into Data + * Downloads entire data using metadata. * - * @param accountData account data - * @param podName pod name - * @param fullPath full path to the file - * @param downloadOptions download options - * @param dataDownloadOptions data download options + * @param {FileMetadata} meta - The metadata of the file to download. + * @param {Block[]} blocks - The array of blocks to download. + * @param {Bee} bee - The Bee object to use for downloading. + * @param {BeeRequestOptions} [downloadOptions] - The options to be passed to the bee.downloadData() method. + * @param {DataDownloadOptions} [dataDownloadOptions] - The options for tracking progress during downloading. + * @returns {Promise} - A promise that resolves with the downloaded data. */ -export async function downloadData( - accountData: AccountData, - podName: string, - fullPath: string, +export async function prepareDataByMeta( + meta: FileMetadata, + blocks: Block[], + bee: Bee, downloadOptions?: BeeRequestOptions, dataDownloadOptions?: DataDownloadOptions, ): Promise { dataDownloadOptions = dataDownloadOptions ?? {} - const bee = accountData.connection.bee - const { blocks } = await getFileMetadataWithBlocks( - bee, - accountData, - podName, - fullPath, - downloadOptions, - dataDownloadOptions, - ) - let totalLength = 0 for (const block of blocks) { totalLength += block.size @@ -152,7 +145,12 @@ export async function downloadData( percentage: calcUploadBlockPercentage(currentBlockId, totalBlocks), } updateDownloadProgress(dataDownloadOptions, DownloadProgressType.DownloadBlockStart, blockData) - const data = await bee.downloadData(block.reference, downloadOptions) + let data = (await bee.downloadData(block.reference, downloadOptions)) as Uint8Array + + if (meta.compression === Compression.GZIP) { + data = decompress(data) + } + updateDownloadProgress(dataDownloadOptions, DownloadProgressType.DownloadBlockEnd, blockData) result.set(data, offset) offset += data.length @@ -164,49 +162,65 @@ export async function downloadData( } /** - * Generate block name by block number + * Downloads file parts and compile them into Data + * + * @param accountData account data + * @param podName pod name + * @param fullPath full path to the file + * @param downloadOptions download options + * @param dataDownloadOptions data download options + */ +export async function downloadData( + accountData: AccountData, + podName: string, + fullPath: string, + downloadOptions?: BeeRequestOptions, + dataDownloadOptions?: DataDownloadOptions, +): Promise { + dataDownloadOptions = dataDownloadOptions ?? {} + const bee = accountData.connection.bee + const fileMeta = await getFileMetadataWithBlocks( + bee, + accountData, + podName, + fullPath, + downloadOptions, + dataDownloadOptions, + ) + + return prepareDataByMeta(fileMeta, fileMeta.blocks, bee, downloadOptions, dataDownloadOptions) +} + +/** + * Interface representing information about blocks. + * @interface */ -export function generateBlockName(blockNumber: number): string { - return 'block-' + blockNumber.toString().padStart(5, '0') +export interface BlocksInfo { + /** + * Blocks + */ + blocks: Blocks + /** + * File size + */ + fileSize: number } /** - * Uploads file content + * Uploads data blocks to a connection using the specified options. * - * @param podName pod where file is stored - * @param fullPath full path of the file - * @param data file content - * @param accountData account data - * @param options upload options + * @param {Connection} connection - The connection to upload the data blocks to. + * @param {Uint8Array | string | ExternalDataBlock[]} data - The data to be uploaded. It can be either a Uint8Array, a string, or an array of ExternalDataBlocks. + * @param {DataUploadOptions} options - The options for the data upload. + * @returns {Promise} - A promise that resolves to an object containing the BlocksInfo. */ -export async function uploadData( - podName: string, - fullPath: string, +export async function uploadBlocks( + connection: Connection, data: Uint8Array | string | ExternalDataBlock[], - accountData: AccountData, options: DataUploadOptions, -): Promise { - assertPodName(podName) - assertFullPathWithName(fullPath) - assertWallet(accountData.wallet) - +): Promise { const blockSize = options.blockSize ?? Number(DEFAULT_UPLOAD_OPTIONS!.blockSize) assertMinLength(blockSize, MINIMUM_BLOCK_SIZE, `Block size is too small. Minimum is ${MINIMUM_BLOCK_SIZE} bytes.`) - const contentType = options.contentType ?? String(DEFAULT_UPLOAD_OPTIONS!.contentType) - const connection = accountData.connection - updateUploadProgress(options, UploadProgressType.GetPodInfo) - const { podWallet, pod } = await getExtendedPodsListByAccountData(accountData, podName) - - updateUploadProgress(options, UploadProgressType.GetPathInfo) - const fullPathInfo = await getCreationPathInfo( - connection.bee, - fullPath, - prepareEthAddress(podWallet.address), - connection.options?.requestOptions, - ) - const pathInfo = extractPathInfo(fullPath) - const now = getUnixTimestamp() - const blocks: Blocks = { blocks: [] } let fileSize = data.length @@ -224,56 +238,172 @@ export async function uploadData( percentage: calcUploadBlockPercentage(i, totalBlocks), } updateUploadProgress(options, UploadProgressType.UploadBlockStart, blockData) - const currentBlock = getDataBlock(data, blockSize, i) - blocks.blocks.push(await uploadDataBlock(connection, currentBlock)) + let currentBlock = getDataBlock(data, blockSize, i) + const originalSize = currentBlock.length + + if (options.compression === Compression.GZIP) { + currentBlock = compress(currentBlock) + } + + blocks.blocks.push(await uploadDataBlock(connection, currentBlock, originalSize)) updateUploadProgress(options, UploadProgressType.UploadBlockEnd, blockData) } } - updateUploadProgress(options, UploadProgressType.UploadBlocksMeta) + return { + blocks, + fileSize, + } +} + +/** + * Uploads the metadata of blocks. + * + * @param {Connection} connection - The connection to upload the metadata to. + * @param {Blocks} blocks - The blocks to generate the metadata from. + * @param {string} filePath - The path of the file. + * @param {string} fileName - The name of the file. + * @param {number} fileSize - The size of the file in bytes. + * @param {number} blockSize - The size of each block in bytes. + * @param {string} contentType - The content type of the file. + * @param {string} compression - The compression algorithm used. + * @param {number} time - The Unix timestamp for the creation, access, and modification time of the file. Defaults to the current Unix timestamp. + * + * @returns {Promise} - The metadata of the uploaded blocks. + */ +export async function uploadBlocksMeta( + connection: Connection, + blocks: Blocks, + filePath: string, + fileName: string, + fileSize: number, + blockSize: number, + contentType: string, + compression: string, + time = getUnixTimestamp(), +): Promise { const manifestBytes = stringToBytes(blocksToManifest(blocks)) const blocksReference = (await uploadBytes(connection, manifestBytes)).reference - const meta: FileMetadata = { + + return { version: META_VERSION, - filePath: pathInfo.path, - fileName: pathInfo.filename, + filePath, + fileName, fileSize, blockSize, contentType, - compression: '', - creationTime: now, - accessTime: now, - modificationTime: now, + compression, + creationTime: time, + accessTime: time, + modificationTime: time, blocksReference, mode: getFileMode(DEFAULT_FILE_PERMISSIONS), } +} - updateUploadProgress(options, UploadProgressType.WriteDirectoryInfo) - await addEntryToDirectory(connection, podWallet, pod.password, pathInfo.path, pathInfo.filename, true) - updateUploadProgress(options, UploadProgressType.WriteFileInfo) - await writeFeedData( - connection, +/** + * Uploads file content + * + * @param connection connection information for data management + * @param socOwnerAddress owner address of the Single Owner Chunk + * @param socSigner signer of the Single Owner Chunk + * @param encryptionPassword data encryption password + * @param fullPath full path of the file + * @param data file content + * @param options upload options + */ +export async function uploadData( + connection: Connection, + socOwnerAddress: EthAddress, + socSigner: string | Uint8Array | Signer, + encryptionPassword: PodPasswordBytes, + fullPath: string, + data: Uint8Array | string | ExternalDataBlock[], + options: DataUploadOptions, +): Promise { + // empty pod name is acceptable in case of uploading the list of pods + const isPodUploading = fullPath === '' + + const blockSize = options.blockSize ?? Number(DEFAULT_UPLOAD_OPTIONS!.blockSize) + assertMinLength(blockSize, MINIMUM_BLOCK_SIZE, `Block size is too small. Minimum is ${MINIMUM_BLOCK_SIZE} bytes.`) + const contentType = options.contentType ?? String(DEFAULT_UPLOAD_OPTIONS!.contentType) + updateUploadProgress(options, UploadProgressType.GetPathInfo) + const fullPathInfo = await getCreationPathInfo( + connection.bee, fullPath, - getFileMetadataRawBytes(meta), - podWallet, - pod.password, - getNextEpoch(fullPathInfo?.lookupAnswer.epoch), + socOwnerAddress, + connection.options?.requestOptions, ) + const pathInfo = + fullPath === '/' + ? { + path: '/', + filename: '', + } + : extractPathInfo(fullPath, isPodUploading) + const { blocks, fileSize } = await uploadBlocks(connection, data, options) + updateUploadProgress(options, UploadProgressType.UploadBlocksMeta) + const meta = await uploadBlocksMeta( + connection, + blocks, + isPodUploading ? '/' : pathInfo.path, + pathInfo.filename, + fileSize, + blockSize, + contentType, + options.compression ?? '', + ) + updateUploadProgress(options, UploadProgressType.WriteFileInfo) + const nextEpoch = prepareEpoch(getNextEpoch(fullPathInfo?.lookupAnswer.epoch)) + const fileMetadataRawBytes = getFileMetadataRawBytes(meta) + await writeFeedData(connection, fullPath, fileMetadataRawBytes, socSigner, encryptionPassword, nextEpoch) updateUploadProgress(options, UploadProgressType.Done) - return meta + return { + lookupAnswer: { data: wrapChunkHelper(wrapBytesWithHelpers(fileMetadataRawBytes)), epoch: nextEpoch }, + meta, + } } /** * Upload data block * @param connection connection * @param block block to upload + * @param originalSize original size of the block */ -export async function uploadDataBlock(connection: Connection, block: Uint8Array): Promise { +export async function uploadDataBlock(connection: Connection, block: Uint8Array, originalSize: number): Promise { return { - size: block.length, + size: originalSize, compressedSize: block.length, reference: (await uploadBytes(connection, block)).reference, } } + +/** + * Retrieves the content of an index file about the directory. + * + * @param {AccountData} accountData - The account data. + * @param {string} podName - The name of the pod. + * @param {string} path - The path to the index file. + * @param {BeeRequestOptions} [downloadOptions] - The download options. + * @param {DataDownloadOptions} [dataDownloadOptions] - The data download options. + * + * @returns {Promise} - A promise that resolves with the content of the index file. + */ +export async function getIndexFileContent( + accountData: AccountData, + podName: string, + path: string, + downloadOptions?: BeeRequestOptions, + dataDownloadOptions?: DataDownloadOptions, +): Promise { + const combinedPath = getIndexFilePath(path) + const data = jsonParse( + (await downloadData(accountData, podName, combinedPath, downloadOptions, dataDownloadOptions)).text(), + 'getIndexFileContent', + ) + assertRawDirectoryMetadata(data) + + return data +} diff --git a/src/file/types.ts b/src/file/types.ts index ade834fd..31baf2c0 100644 --- a/src/file/types.ts +++ b/src/file/types.ts @@ -127,6 +127,16 @@ export interface ProgressCallback { progressCallback?: (info: T) => void } +/** + * Enumeration representing compression algorithms + */ +export enum Compression { + /** + * GZIP compression + */ + GZIP = 'gzip', +} + /** * File upload options */ @@ -139,6 +149,10 @@ export interface DataUploadOptions extends ProgressCallback * Content type of the file */ contentType?: string + /** + * Compression algorithm + */ + compression?: Compression } /** diff --git a/src/file/utils.ts b/src/file/utils.ts index 84ca9c87..c4ee3fd4 100644 --- a/src/file/utils.ts +++ b/src/file/utils.ts @@ -86,9 +86,13 @@ export async function uploadBytes(connection: Connection, data: Uint8Array): Pro * Extracts filename and path from full path * * @param fullPath full absolute path with filename + * @param isPod is pod path */ -export function extractPathInfo(fullPath: string): PathInfo { - assertFullPathWithName(fullPath) +export function extractPathInfo(fullPath: string, isPod = false): PathInfo { + if (!isPod) { + assertFullPathWithName(fullPath) + } + const exploded = splitPath(fullPath) const filename = exploded.pop() @@ -125,7 +129,16 @@ export async function downloadBlocksManifest( reference: Reference, downloadOptions?: BeeRequestOptions, ): Promise { - const data = (await bee.downloadData(reference, downloadOptions)).text() + return extractBlocksManifest((await bee.downloadData(reference, downloadOptions)).text()) +} + +/** + * Extracts blocks manifest from the given data. + * + * @param {string} data - The data from which to extract the blocks manifest. + * @return {Blocks} The extracted blocks manifest. + */ +export function extractBlocksManifest(data: string): Blocks { const rawBlocks = jsonParse(data, 'blocks manifest') assertRawBlocks(rawBlocks) diff --git a/src/pod/api.ts b/src/pod/api.ts index b9e380ae..55e39e5b 100644 --- a/src/pod/api.ts +++ b/src/pod/api.ts @@ -1,65 +1,65 @@ -import { Bee } from '@ethersphere/bee-js' -import { getFeedData } from '../feed/api' -import { POD_TOPIC } from './personal-storage' -import { ExtendedPodInfo, extractPods, PodsInfo } from './utils' +import { ExtendedPodInfo, getPodsData, migratePodV1ToV2, PodsVersion, PodsInfo, extractPodsV2 } from './utils' import { prepareEthAddress, privateKeyToBytes } from '../utils/wallet' import { utils } from 'ethers' import { DownloadOptions } from '../content-items/types' import { getWalletByIndex } from '../utils/cache/wallet' import { getPodsList as getPodsListCached } from './cache/api' +import { AccountData } from '../account/account-data' +import { Bee } from '@ethersphere/bee-js' /** * Gets pods list with lookup answer * - * @param bee Bee instance + * @param accountData account data + * @param bee Bee client * @param userWallet root wallet for downloading and decrypting data * @param downloadOptions request download */ export async function getPodsList( + accountData: AccountData, bee: Bee, userWallet: utils.HDNode, downloadOptions?: DownloadOptions, ): Promise { - let lookupAnswer - try { - lookupAnswer = await getFeedData( - bee, - POD_TOPIC, - prepareEthAddress(userWallet.address), - downloadOptions?.requestOptions, - ) - // eslint-disable-next-line no-empty - } catch (e) {} + let podsData = await getPodsData(bee, prepareEthAddress(userWallet.address), downloadOptions?.requestOptions) - if (!lookupAnswer) { - throw new Error('Pods data can not be found') + if (podsData.podsVersion === PodsVersion.V1) { + await migratePodV1ToV2(accountData, podsData, privateKeyToBytes(userWallet.privateKey)) + podsData = await getPodsData(bee, prepareEthAddress(userWallet.address), downloadOptions?.requestOptions) } - const podsList = extractPods(lookupAnswer.data.chunkContent(), privateKeyToBytes(userWallet.privateKey)) + const podsList = await extractPodsV2( + accountData, + podsData.lookupAnswer.data.chunkContent(), + privateKeyToBytes(userWallet.privateKey), + downloadOptions?.requestOptions, + ) return { podsList, - epoch: lookupAnswer.epoch, + epoch: podsData.lookupAnswer.epoch, } } /** * Gets pods list with lookup answer and extended info about pod * - * @param bee Bee instance + * @param accountData account data + * @param bee Bee client * @param podName pod to find * @param userWallet root wallet for downloading and decrypting data * @param seed seed of wallet owns the FDP account * @param downloadOptions request options */ export async function getExtendedPodsList( + accountData: AccountData, bee: Bee, podName: string, userWallet: utils.HDNode, seed: Uint8Array, downloadOptions?: DownloadOptions, ): Promise { - const { podsList, epoch } = await getPodsListCached(bee, userWallet, downloadOptions) + const { podsList, epoch } = await getPodsListCached(accountData, bee, userWallet, downloadOptions) const pod = podsList.pods.find(item => item.name === podName) if (!pod) { diff --git a/src/pod/cache/api.ts b/src/pod/cache/api.ts index a136d1c9..cdd5e22a 100644 --- a/src/pod/cache/api.ts +++ b/src/pod/cache/api.ts @@ -5,15 +5,18 @@ import { jsonToPodsList, podListToJSON, PodsInfo } from '../utils' import { CacheEpochData } from '../../cache/types' import { getCacheKey, processCacheData } from '../../cache/utils' import { getPodsList as getPodsNoCache } from '../api' +import { AccountData } from '../../account/account-data' /** * Gets pods list with lookup answer * - * @param bee Bee instance + * @param accountData account data + * @param bee Bee client * @param userWallet root wallet for downloading and decrypting data * @param downloadOptions request download */ export async function getPodsList( + accountData: AccountData, bee: Bee, userWallet: utils.HDNode, downloadOptions?: DownloadOptions, @@ -21,7 +24,7 @@ export async function getPodsList( return processCacheData({ key: getCacheKey(userWallet.address), onGetData: async (): Promise => { - const data = await getPodsNoCache(bee, userWallet, downloadOptions) + const data = await getPodsNoCache(accountData, bee, userWallet, downloadOptions) return { epoch: data.epoch, diff --git a/src/pod/personal-storage.ts b/src/pod/personal-storage.ts index c702b838..fd1990f7 100644 --- a/src/pod/personal-storage.ts +++ b/src/pod/personal-storage.ts @@ -1,6 +1,5 @@ -import { SharedPod, PodReceiveOptions, PodShareInfo, PodsList, Pod, PodsListPrepared } from './types' +import { Pod, PodReceiveOptions, PodShareInfo, PodsList, PodsListPrepared, SharedPod } from './types' import { assertAccount } from '../account/utils' -import { writeFeedData } from '../feed/api' import { AccountData } from '../account/account-data' import { assertPod, @@ -11,18 +10,19 @@ import { createPodShareInfo, getSharedPodInfo, podListToBytes, - podsListPreparedToPodsList, - podPreparedToPod, - sharedPodPreparedToSharedPod, podListToJSON, assertPodShareInfo, + podPreparedToPod, + podsListPreparedToPodsList, + sharedPodPreparedToSharedPod, + uploadPodDataV2, } from './utils' import { getExtendedPodsList } from './api' import { uploadBytes } from '../file/utils' import { bytesToString, stringToBytes } from '../utils/bytes' import { Reference, Utils } from '@ethersphere/bee-js' import { assertEncryptedReference, EncryptedReference, HexString } from '../utils/hex' -import { prepareEthAddress, preparePrivateKey } from '../utils/wallet' +import { prepareEthAddress } from '../utils/wallet' import { getCacheKey, setEpochCache } from '../cache/utils' import { getPodsList } from './cache/api' import { getNextEpoch } from '../feed/lookup/utils' @@ -39,8 +39,6 @@ import { decryptWithBytes, deriveSecretFromKeys } from '../utils/encryption' import { namehash } from 'ethers/lib/utils' import { BigNumber } from 'ethers' -export const POD_TOPIC = 'Pods' - export class PersonalStorage { constructor( private accountData: AccountData, @@ -65,7 +63,7 @@ export class PersonalStorage { try { podsList = ( - await getPodsList(this.accountData.connection.bee, this.accountData.wallet!, { + await getPodsList(this.accountData, this.accountData.connection.bee, this.accountData.wallet!, { requestOptions: this.accountData.connection.options?.requestOptions, cacheInfo: this.accountData.connection.cacheInfo, }) @@ -87,7 +85,7 @@ export class PersonalStorage { assertAccount(this.accountData, { writeRequired: true }) const pod = await createPod( - this.accountData.connection.bee, + this.accountData, this.accountData.connection, this.accountData.wallet!, this.accountData.seed!, @@ -111,7 +109,7 @@ export class PersonalStorage { async delete(name: string): Promise { assertAccount(this.accountData, { writeRequired: true }) name = name.trim() - const podsInfo = await getPodsList(this.accountData.connection.bee, this.accountData.wallet!, { + const podsInfo = await getPodsList(this.accountData, this.accountData.connection.bee, this.accountData.wallet!, { requestOptions: this.accountData.connection.options?.requestOptions, cacheInfo: this.accountData.connection.cacheInfo, }) @@ -128,13 +126,11 @@ export class PersonalStorage { const allPodsData = podListToBytes(podsFiltered, podsSharedFiltered) const wallet = this.accountData.wallet! const epoch = getNextEpoch(podsInfo.epoch) - await writeFeedData( + await uploadPodDataV2( this.accountData.connection, - POD_TOPIC, + prepareEthAddress(wallet.address), + wallet.privateKey, allPodsData, - wallet, - preparePrivateKey(wallet.privateKey), - epoch, ) await setEpochCache(this.accountData.connection.cacheInfo, getCacheKey(wallet.address), { epoch, @@ -156,10 +152,17 @@ export class PersonalStorage { assertPodName(name) const wallet = this.accountData.wallet! const address = prepareEthAddress(wallet.address) - const podInfo = await getExtendedPodsList(this.accountData.connection.bee, name, wallet, this.accountData.seed!, { - requestOptions: this.accountData.connection.options?.requestOptions, - cacheInfo: this.accountData.connection.cacheInfo, - }) + const podInfo = await getExtendedPodsList( + this.accountData, + this.accountData.connection.bee, + name, + wallet, + this.accountData.seed!, + { + requestOptions: this.accountData.connection.options?.requestOptions, + cacheInfo: this.accountData.connection.cacheInfo, + }, + ) const data = stringToBytes( JSON.stringify(createPodShareInfo(name, podInfo.podAddress, address, podInfo.pod.password)), @@ -200,7 +203,7 @@ export class PersonalStorage { const data = await this.getSharedInfo(reference) const pod = await createPod( - this.accountData.connection.bee, + this.accountData, this.accountData.connection, this.accountData.wallet!, this.accountData.seed!, diff --git a/src/pod/types.ts b/src/pod/types.ts index 4fe74f6c..df7d404d 100644 --- a/src/pod/types.ts +++ b/src/pod/types.ts @@ -1,6 +1,8 @@ -import { Utils, Reference } from '@ethersphere/bee-js' +import { Reference } from '@ethersphere/bee-js' import { PodPasswordBytes } from '../utils/encryption' import { HexString } from '../utils/hex' +import { LookupAnswer } from '../feed/types' +import { EthAddress } from '../utils/eth' /** * Pods information prepared for internal usage @@ -54,7 +56,7 @@ export interface SharedPod extends PodName { */ export interface SharedPodPrepared extends PodName { password: PodPasswordBytes - address: Utils.EthAddress + address: EthAddress } /** @@ -93,6 +95,20 @@ export interface FileMetadata { mode: number } +/** + * Information about a file with lookup answer + */ +export interface FileMetadataWithLookupAnswer { + /** + * File metadata + */ + meta: FileMetadata + /** + * Lookup answer + */ + lookupAnswer: LookupAnswer +} + /** * Information about a directory */ diff --git a/src/pod/utils.ts b/src/pod/utils.ts index 252fc947..79a4a399 100644 --- a/src/pod/utils.ts +++ b/src/pod/utils.ts @@ -1,18 +1,18 @@ import { + FileMetadataWithLookupAnswer, Pod, - PodPrepared, PodName, + PodPrepared, PodShareInfo, + PodsList, + PodsListPrepared, RawDirectoryMetadata, SharedPod, SharedPodPrepared, - PodsListPrepared, - PodsList, } from './types' -import { Bee, Data, Utils } from '@ethersphere/bee-js' +import { Bee, BeeRequestOptions, Data, Utils } from '@ethersphere/bee-js' import { assertAllowedZeroBytes, - assertMaxLength, bytesToString, MAX_ZEROS_PERCENTAGE_ALLOWED, stringToBytes, @@ -36,25 +36,69 @@ import { bytesToHex, EncryptedReference, isHexEthAddress } from '../utils/hex' import { getExtendedPodsList } from './api' import { Epoch, getFirstEpoch } from '../feed/lookup/epoch' import { getUnixTimestamp } from '../utils/time' -import { writeFeedData } from '../feed/api' -import { preparePrivateKey } from '../utils/wallet' +import { getFeedData } from '../feed/api' import { createRootDirectory } from '../directory/handler' -import { POD_TOPIC } from './personal-storage' import { Connection } from '../connection/connection' import { AccountData } from '../account/account-data' import { decryptBytes, POD_PASSWORD_LENGTH, PodPasswordBytes } from '../utils/encryption' import CryptoJS from 'crypto-js' import { jsonParse } from '../utils/json' -import { DEFAULT_DIRECTORY_PERMISSIONS, getDirectoryMode } from '../directory/utils' +import { assertRawFileMetadata, DEFAULT_DIRECTORY_PERMISSIONS, getDirectoryMode } from '../directory/utils' import { getCacheKey, setEpochCache } from '../cache/utils' import { getWalletByIndex } from '../utils/cache/wallet' import { getPodsList } from './cache/api' -import { CHUNK_SIZE } from '../account/utils' +import { LookupAnswer } from '../feed/types' +import { prepareDataByMeta, uploadData } from '../file/handler' +import { Compression } from '../file/types' +import { MINIMUM_BLOCK_SIZE } from '../content-items/handler' +import { downloadBlocksManifest } from '../file/utils' +import { extractMetadata } from '../content-items/utils' +import { rawFileMetadataToFileMetadata } from '../file/adapter' +import { prepareEthAddress, preparePrivateKey } from '../utils/wallet' +import { EthAddress } from '../utils/eth' export const META_VERSION = 2 export const MAX_PODS_COUNT = 65536 export const MAX_POD_NAME_LENGTH = 64 +/** + * Represents the topic for Pods version 1 + */ +export const POD_TOPIC = 'Pods' + +/** + * Represents the topic for Pods version 2 + */ +export const POD_TOPIC_V2 = 'PodsV2' + +/** + * Enum representing the version of PODs + */ +export enum PodsVersion { + /** + * Version 1 + */ + V1 = 'v1', + /** + * Version 2 + */ + V2 = 'v2', +} + +/** + * Answer with LookupAnswer and pods version + */ +export interface PodsLookupAnswer { + /** + * Pods version + */ + podsVersion: PodsVersion + /** + * Lookup answer + */ + lookupAnswer: LookupAnswer +} + /** * Information about pods list */ @@ -69,7 +113,7 @@ export interface PodsInfo { export interface ExtendedPodInfo { pod: PodPrepared podWallet: utils.HDNode - podAddress: Utils.EthAddress + podAddress: EthAddress epoch: Epoch } @@ -83,12 +127,45 @@ export interface PathInfo { /** * Extracts pod information from raw data - * * @param data raw data with pod information * @param podPassword bytes of pod password */ -export function extractPods(data: Data, podPassword: PodPasswordBytes): PodsListPrepared { - return jsonToPodsList(bytesToString(decryptBytes(bytesToHex(podPassword), data))) +export function extractPodsV1(data: Data, podPassword: PodPasswordBytes): PodsListPrepared { + return jsonToPodsList(bytesToString(extractPodsBytes(data, podPassword))) +} + +/** + * Extracts and prepares a list of pods from the given V2 data using the provided pod password + * @param accountData + * @param {Data} data - The data from which to extract the pods. + * @param {PodPasswordBytes} podPassword - The password to decrypt the pods. + * @param downloadOptions + * @return {PodsListPrepared} - The list of pods that have been extracted and prepared. + */ +export async function extractPodsV2( + accountData: AccountData, + data: Data, + podPassword: PodPasswordBytes, + downloadOptions?: BeeRequestOptions, +): Promise { + const meta = extractMetadata(data, podPassword) + assertRawFileMetadata(meta) + const fileMeta = rawFileMetadataToFileMetadata(meta) + const blocksData = await downloadBlocksManifest(accountData.connection.bee, fileMeta.blocksReference, downloadOptions) + const preparedData = bytesToString( + await prepareDataByMeta(fileMeta, blocksData.blocks, accountData.connection.bee, downloadOptions), + ) + + return jsonToPodsList(preparedData) +} + +/** + * Decrypts pod information from raw data + * @param data raw data with pod information + * @param podPassword bytes of pod password + */ +export function extractPodsBytes(data: Data, podPassword: PodPasswordBytes): Uint8Array { + return decryptBytes(bytesToHex(podPassword), data) } /** @@ -335,8 +412,8 @@ export function assertSharedPods(value: unknown): asserts value is SharedPodPrep */ export function createPodShareInfo( podName: string, - podAddress: Utils.EthAddress, - userAddress: Utils.EthAddress, + podAddress: EthAddress, + userAddress: EthAddress, password: PodPasswordBytes, ): PodShareInfo { return { @@ -380,19 +457,25 @@ export function getRandomPodPassword(): PodPasswordBytes { /** * Creates user's pod or add a shared pod to an account * - * @param bee Bee instance + * @param accountData AccountData instance * @param connection Connection instance * @param userWallet FDP account wallet * @param seed FDP account seed * @param pod pod information to create */ export async function createPod( - bee: Bee, + accountData: AccountData, connection: Connection, userWallet: utils.HDNode, seed: Uint8Array, pod: PodName | SharedPodPrepared, ): Promise { + const wallet = accountData.wallet! + + if (!wallet) { + throw new Error('Wallet is not initialized') + } + assertPodNameType(pod) pod.name = pod.name.trim() assertPodName(pod.name) @@ -401,7 +484,7 @@ export async function createPod( let podsList: PodsListPrepared = { pods: [], sharedPods: [] } let podsInfo try { - podsInfo = await getPodsList(bee, userWallet, { + podsInfo = await getPodsList(accountData, connection.bee, userWallet, { requestOptions: connection.options?.requestOptions, cacheInfo, }) @@ -432,8 +515,7 @@ export async function createPod( } const allPodsData = podListToBytes(pods, sharedPods) - assertMaxLength(allPodsData.length, CHUNK_SIZE, `Exceeded pod list size by ${allPodsData.length - CHUNK_SIZE} bytes`) - await writeFeedData(connection, POD_TOPIC, allPodsData, userWallet, preparePrivateKey(userWallet.privateKey), epoch) + await uploadPodDataV2(accountData.connection, prepareEthAddress(wallet?.address), wallet.privateKey, allPodsData) if (isPod(realPod)) { const podWallet = await getWalletByIndex(seed, nextIndex, cacheInfo) @@ -448,6 +530,33 @@ export async function createPod( return realPod } +/** + * Uploads pods data using version 2 method + * @param connection Connection instance + * @param socOwnerAddress Single Owner Chunk owner address + * @param socSigner Single Owner Chunk signer + * @param allPodsData pods data + */ +export async function uploadPodDataV2( + connection: Connection, + socOwnerAddress: EthAddress, + socSigner: string, + allPodsData: Uint8Array, +): Promise { + return uploadData( + connection, + socOwnerAddress, + socSigner, + preparePrivateKey(socSigner), + getPodV2Topic(), + allPodsData, + { + blockSize: MINIMUM_BLOCK_SIZE, + compression: Compression.GZIP, + }, + ) +} + /** * Gets extended information about pods using AccountData instance and pod name * @@ -458,7 +567,7 @@ export async function getExtendedPodsListByAccountData( accountData: AccountData, podName: string, ): Promise { - return getExtendedPodsList(accountData.connection.bee, podName, accountData.wallet!, accountData.seed!, { + return getExtendedPodsList(accountData, accountData.connection.bee, podName, accountData.wallet!, accountData.seed!, { requestOptions: accountData.connection.options?.requestOptions, cacheInfo: accountData.connection.cacheInfo, }) @@ -551,7 +660,11 @@ export function assertJsonPod(value: unknown): asserts value is Pod { } /** - * Asserts that json shared pod is correct + * Asserts that a value is a valid SharedPod. + * + * @param {unknown} value - The value to assert. + * + * @throws {Error} Invalid json shared pod if the value is not a valid SharedPod. */ export function assertJsonSharedPod(value: unknown): asserts value is SharedPod { if (!isJsonSharedPod(value)) { @@ -560,12 +673,92 @@ export function assertJsonSharedPod(value: unknown): asserts value is SharedPod } /** - * Converts JsonSharedPod to SharedPod + * Converts a SharedPod object from JSON format to a preprocessed SharedPod object. + * + * @param {SharedPod} pod - The SharedPod object in JSON format. + * @return {SharedPodPrepared} - The preprocessed SharedPod object. */ export function jsonSharedPodToSharedPod(pod: SharedPod): SharedPodPrepared { const password = Utils.hexToBytes(pod.password) as PodPasswordBytes - const address = Utils.hexToBytes(pod.address) as Utils.EthAddress + const address = Utils.hexToBytes(pod.address) as EthAddress assertPodPasswordBytes(password) return { ...pod, password, address } } + +/** + * Retrieves pods data for a given address. + * + * @param {Bee} bee - The bee client object. + * @param {EthAddress} address - The address to look up pods data for. + * @param {BeeRequestOptions} [requestOptions] - The request options. + * @returns {Promise} The pods data. + * @throws {Error} If the pods data cannot be found. + */ +export async function getPodsData( + bee: Bee, + address: EthAddress, + requestOptions?: BeeRequestOptions, +): Promise { + let podsVersion = PodsVersion.V2 + let lookupAnswer + try { + lookupAnswer = await getFeedData(bee, getPodV2Topic(), address, requestOptions) + // eslint-disable-next-line no-empty + } catch (e) {} + + // if V2 does not exist, try V1 + if (!lookupAnswer) { + try { + lookupAnswer = await getFeedData(bee, POD_TOPIC, address, requestOptions) + podsVersion = PodsVersion.V1 + // eslint-disable-next-line no-empty + } catch (e) {} + } + + if (!lookupAnswer) { + throw new Error('Pods data can not be found') + } + + return { + podsVersion, + lookupAnswer, + } +} + +/** + * Migrates a pod from version 1 to version 2. + * + * @param {AccountData} accountData - The account data. + * @param {PodsLookupAnswer} podsData - The pods data. + * @param {PodPasswordBytes} privateKey - The private key used for migration. + * @return {Promise} - The pods data with the migrated version. + */ +export async function migratePodV1ToV2( + accountData: AccountData, + podsData: PodsLookupAnswer, + privateKey: PodPasswordBytes, +): Promise { + const podList = extractPodsV1(podsData.lookupAnswer.data.chunkContent(), privateKey) + + const podsBytes = podListToBytes(podList.pods, podList.sharedPods) + + const wallet = accountData.wallet! + const lookupAnswer = ( + await uploadPodDataV2(accountData.connection, prepareEthAddress(wallet.address), wallet.privateKey, podsBytes) + ).lookupAnswer + + return { + podsVersion: PodsVersion.V2, + lookupAnswer, + } +} + +/** + * Retrieves the topic for Pod version 2. + * + * @returns {string} The topic for Pod version 2. + */ +export function getPodV2Topic(): string { + return `/${POD_TOPIC_V2}` +} diff --git a/src/utils/compression.ts b/src/utils/compression.ts new file mode 100644 index 00000000..d89e95b6 --- /dev/null +++ b/src/utils/compression.ts @@ -0,0 +1,17 @@ +import pako from 'pako' + +/** + * Compresses the given data using gzip + * @param data input data + */ +export function compress(data: Uint8Array): Uint8Array { + return pako.gzip(data) +} + +/** + * Decompresses the given data using gzip + * @param data input data + */ +export function decompress(data: Uint8Array): Uint8Array { + return pako.ungzip(data) +} diff --git a/src/utils/eth.ts b/src/utils/eth.ts new file mode 100644 index 00000000..aabd84ae --- /dev/null +++ b/src/utils/eth.ts @@ -0,0 +1,3 @@ +import { Utils } from '@ethersphere/bee-js' + +export type EthAddress = Utils.EthAddress diff --git a/src/utils/hex.ts b/src/utils/hex.ts index 85dfa387..da89cac0 100644 --- a/src/utils/hex.ts +++ b/src/utils/hex.ts @@ -6,7 +6,6 @@ export type HexEthAddress = HexString<40> /** * Nominal type to represent hex strings WITHOUT '0x' prefix. * For example for 32 bytes hex representation you have to use 64 length. - * TODO: Make Length mandatory: https://github.com/ethersphere/bee-js/issues/208 */ export type HexString = FlavoredType< string & { diff --git a/src/utils/type.ts b/src/utils/type.ts index 962a170d..2db91e97 100644 --- a/src/utils/type.ts +++ b/src/utils/type.ts @@ -1,6 +1,6 @@ import { Utils } from '@ethersphere/bee-js' import { POD_PASSWORD_LENGTH, PodPasswordBytes } from './encryption' -import { utils, Wallet } from 'ethers' +import { EthAddress } from './eth' export type { PublicKey } from '@fairdatasociety/fdp-contracts-js' export const ETH_ADDR_HEX_LENGTH = 40 @@ -35,7 +35,7 @@ export function assertString(value: unknown, customMessage?: string): asserts va /** * Checks that value is a valid Ethereum address string (without 0x prefix) */ -export function isEthAddress(value: unknown): value is Utils.EthAddress { +export function isEthAddress(value: unknown): value is EthAddress { return Utils.isHexString(value) && value.length === ETH_ADDR_HEX_LENGTH } @@ -93,12 +93,3 @@ export function assertPodPasswordBytes(value: PodPasswordBytes): asserts value i export function isArrayBufferView(value: unknown): value is ArrayBufferView { return ArrayBuffer.isView(value) } - -/** - * Asserts that the given value is a wallet - */ -export function assertWallet(value: unknown): asserts value is utils.HDNode | Wallet { - if (!value) { - throw new Error('Empty wallet') - } -} diff --git a/src/utils/wallet.ts b/src/utils/wallet.ts index 65eb45b6..dd21b4be 100644 --- a/src/utils/wallet.ts +++ b/src/utils/wallet.ts @@ -1,6 +1,7 @@ import { utils } from 'ethers' import { PrivateKeyBytes, Utils } from '@ethersphere/bee-js' import { removeZeroFromHex } from '../account/utils' +import { EthAddress } from './eth' /** * Get Hierarchical Deterministic Wallet from seed by index @@ -37,7 +38,7 @@ export function mnemonicToSeed(mnemonic: string): Uint8Array { * * @param address Ethereum address for preparation */ -export function prepareEthAddress(address: string | Uint8Array): Utils.EthAddress { +export function prepareEthAddress(address: string | Uint8Array): EthAddress { return Utils.makeEthAddress(address) } diff --git a/test/integration/fairos/fdp-class.fairos.spec.ts b/test/integration/fairos/fdp-class.fairos.spec.ts index 93581175..b0865526 100644 --- a/test/integration/fairos/fdp-class.fairos.spec.ts +++ b/test/integration/fairos/fdp-class.fairos.spec.ts @@ -7,6 +7,7 @@ import { getRawMetadata } from '../../../src/content-items/utils' import { RawDirectoryMetadata, RawFileMetadata } from '../../../src/pod/types' import { DEFAULT_FILE_PERMISSIONS, getFileMode } from '../../../src/file/utils' import { DEFAULT_DIRECTORY_PERMISSIONS, getDirectoryMode } from '../../../src/directory/utils' +import { getIndexFileContent } from '../../../src/file/handler' jest.setTimeout(400000) describe('Fair Data Protocol with FairOS-dfs', () => { @@ -143,49 +144,50 @@ describe('Fair Data Protocol with FairOS-dfs', () => { expect(fairosList2.sharedPods).toHaveLength(0) }) - it('should delete pod in fairos and it will disappear in fdp', async () => { - const fairos = new FairOSApi() - const fdp = createFdp() - const user = generateUser() - const podName1 = generateRandomHexString() - const podName2 = generateRandomHexString() - - await topUpAddress(user.address) - await fairos.register(user.username, user.password, user.mnemonic) - const createResponse1 = await fairos.podNew(podName1, user.password) - const createResponse2 = await fairos.podNew(podName2, user.password) - expect(createResponse1.status).toEqual(201) - expect(createResponse1.data).toStrictEqual({ message: 'pod created successfully' }) - expect(createResponse2.status).toEqual(201) - expect(createResponse2.data).toStrictEqual({ message: 'pod created successfully' }) - - await fdp.account.login(user.username, user.password) - const fdpPods = (await fdp.personalStorage.list()).pods - expect(fdpPods).toHaveLength(2) - expect(fdpPods.find(item => item.name === podName1)).toBeDefined() - expect(fdpPods.find(item => item.name === podName2)).toBeDefined() - - const deleteResponse1 = await fairos.podDelete(podName1, user.password) - expect(deleteResponse1.data).toEqual({ message: 'pod deleted successfully' }) - - const fdpPods2 = (await fdp.personalStorage.list()).pods - expect(fdpPods2).toHaveLength(1) - expect(fdpPods2.find(item => item.name === podName1)).toBeUndefined() - expect(fdpPods2.find(item => item.name === podName2)).toBeDefined() - - // test mixed interaction (pod created from fairos and deleted with fdp) - await fdp.personalStorage.delete(podName2) - const fdpResponse3 = await fdp.personalStorage.list() - expect(fdpResponse3).toEqual({ - pods: [], - sharedPods: [], - }) - const fairosResponse3 = await fairos.podLs() - expect(fairosResponse3.data).toEqual({ - pods: [], - sharedPods: [], - }) - }) + // todo on the fairos side: AxiosError: Request failed with status code 500. Server response: {"message":"delete pod: could not delete file inode bd74c092eca12d33fa007e9bb7388ea5ff350b012924cd31ad4a99f0e84a1a6f96238607966946b2fbe90ff111561e33bbf279070761001bede787d6ab3570ae: failed to unpin reference : {\"code\":500,\"message\":\"unpin root hash: deletion of pin failed\"}\n\n"} + // it('should delete pod in fairos and it will disappear in fdp', async () => { + // const fairos = new FairOSApi() + // const fdp = createFdp() + // const user = generateUser() + // const podName1 = generateRandomHexString() + // const podName2 = generateRandomHexString() + // + // await topUpAddress(user.address) + // await fairos.register(user.username, user.password, user.mnemonic) + // const createResponse1 = await fairos.podNew(podName1, user.password) + // const createResponse2 = await fairos.podNew(podName2, user.password) + // expect(createResponse1.status).toEqual(201) + // expect(createResponse1.data).toStrictEqual({ message: 'pod created successfully' }) + // expect(createResponse2.status).toEqual(201) + // expect(createResponse2.data).toStrictEqual({ message: 'pod created successfully' }) + // + // await fdp.account.login(user.username, user.password) + // const fdpPods = (await fdp.personalStorage.list()).pods + // expect(fdpPods).toHaveLength(2) + // expect(fdpPods.find(item => item.name === podName1)).toBeDefined() + // expect(fdpPods.find(item => item.name === podName2)).toBeDefined() + // + // const deleteResponse1 = await fairos.podDelete(podName1, user.password) + // expect(deleteResponse1.data).toEqual({ message: 'pod deleted successfully' }) + // + // const fdpPods2 = (await fdp.personalStorage.list()).pods + // expect(fdpPods2).toHaveLength(1) + // expect(fdpPods2.find(item => item.name === podName1)).toBeUndefined() + // expect(fdpPods2.find(item => item.name === podName2)).toBeDefined() + // + // // test mixed interaction (pod created from fairos and deleted with fdp) + // await fdp.personalStorage.delete(podName2) + // const fdpResponse3 = await fdp.personalStorage.list() + // expect(fdpResponse3).toEqual({ + // pods: [], + // sharedPods: [], + // }) + // const fairosResponse3 = await fairos.podLs() + // expect(fairosResponse3.data).toEqual({ + // pods: [], + // sharedPods: [], + // }) + // }) }) describe('Directory', () => { @@ -205,26 +207,25 @@ describe('Fair Data Protocol with FairOS-dfs', () => { await fdp.account.register(fdp.account.createRegistrationRequest(user.username, user.password)) await fdp.personalStorage.create(podName1) await fdp.directory.create(podName1, fullDirectoryName1) + await fdp.directory.create(podName1, fullSubDirectoryName1) + await fdp.directory.create(podName1, fullDirectoryName2) + await fairos.login(user.username, user.password) await fairos.podOpen(podName1, user.password) - const response = await fairos.dirLs(podName1) - const data = response.data as Directories - expect(response.status).toEqual(200) - expect(data.dirs).toHaveLength(1) - const dir1 = data.dirs[0] - expect(dir1.name).toEqual(directoryName1) - expect(dir1.contentType).toEqual('inode/directory') - expect(dir1.creationTime).toBeDefined() - expect(dir1.modificationTime).toBeDefined() - expect(dir1.accessTime).toBeDefined() + const response1 = await fairos.dirLs(podName1) + const data1 = response1.data as Directories + expect(response1.status).toEqual(200) + expect(data1.dirs).toHaveLength(2) + for (const dir of data1.dirs) { + expect(dir.contentType).toEqual('inode/directory') + expect(dir.creationTime).toBeDefined() + expect(dir.modificationTime).toBeDefined() + expect(dir.accessTime).toBeDefined() + } - await fdp.directory.create(podName1, fullSubDirectoryName1) - await fdp.directory.create(podName1, fullDirectoryName2) - const response2 = await fairos.dirLs(podName1) - expect(response2.data?.dirs).toHaveLength(2) - const dirs2 = (response2.data as Directories).dirs - expect(dirs2.find(item => item.name === directoryName1)).toBeDefined() - expect(dirs2.find(item => item.name === directoryName2)).toBeDefined() + const dirs1 = data1.dirs + expect(dirs1.find(item => item.name === directoryName1)).toBeDefined() + expect(dirs1.find(item => item.name === directoryName2)).toBeDefined() const data3 = (await fairos.dirLs(podName1, fullDirectoryName1)).data as Directories const dirs3 = data3.dirs @@ -273,6 +274,9 @@ describe('Fair Data Protocol with FairOS-dfs', () => { // test mixed clients directory creation in the same account await fdp.directory.create(podName1, fullDirectoryName3) + // reset the cache by using podClose and podOpen + await fairos.podClose(podName1) + await fairos.podOpen(podName1, user.password) const response3 = (await fairos.dirLs(podName1)).data as Directories expect(response3.dirs).toHaveLength(3) expect(response3.dirs.find(item => item.name === directoryName3)).toBeDefined() @@ -300,43 +304,46 @@ describe('Fair Data Protocol with FairOS-dfs', () => { expect(dirs1[0].contentType).toEqual('inode/directory') await fdp.directory.delete(podName1, fullDirectoryName1) + await fairos.podClose(podName1) + await fairos.podOpen(podName1, user.password) const response2 = await fairos.dirLs(podName1) const dirs2 = response2.data?.dirs expect(dirs2).toBeUndefined() }) - it('should delete directory in fairos and it will disappear in fdp', async () => { - const fairos = new FairOSApi() - const fdp = createFdp() - const user = generateUser() - const podName1 = generateRandomHexString() - const directoryName1 = generateRandomHexString() - const fullDirectoryName1 = '/' + directoryName1 - - await topUpAddress(user.address) - await fairos.register(user.username, user.password, user.mnemonic) - await fairos.podNew(podName1, user.password) - await fairos.dirMkdir(podName1, fullDirectoryName1, user.password) - - await fdp.account.login(user.username, user.password) - const fdpResponse = await fdp.directory.read(podName1, '/', true) - expect(fdpResponse.directories).toHaveLength(1) - const dir1 = fdpResponse.directories[0] - expect(dir1.name).toEqual(directoryName1) - - await fairos.dirRmdir(podName1, fullDirectoryName1) - const fdpResponse2 = await fdp.directory.read(podName1, '/', true) - expect(fdpResponse2.directories).toHaveLength(0) - - // test mixed interaction (directory created from fairos and deleted with fdp) - await expect(fdp.directory.delete(podName1, fullDirectoryName1)).rejects.toThrow( - `Item "${fullDirectoryName1}" not found in the list of items`, - ) - const fdpResponse3 = await fdp.directory.read(podName1, '/', true) - expect(fdpResponse3.directories).toHaveLength(0) - const fairosDirs = await fairos.dirLs(podName1) - expect(fairosDirs.data).toEqual({}) - }) + // todo on the fairos side: AxiosError: Request failed with status code 500. Server response: {"message":"delete pod: could not delete file inode bd74c092eca12d33fa007e9bb7388ea5ff350b012924cd31ad4a99f0e84a1a6f96238607966946b2fbe90ff111561e33bbf279070761001bede787d6ab3570ae: failed to unpin reference : {\"code\":500,\"message\":\"unpin root hash: deletion of pin failed\"}\n\n"} + // it('should delete directory in fairos and it will disappear in fdp', async () => { + // const fairos = new FairOSApi() + // const fdp = createFdp() + // const user = generateUser() + // const podName1 = generateRandomHexString() + // const directoryName1 = generateRandomHexString() + // const fullDirectoryName1 = '/' + directoryName1 + // + // await topUpAddress(user.address) + // await fairos.register(user.username, user.password, user.mnemonic) + // await fairos.podNew(podName1, user.password) + // await fairos.dirMkdir(podName1, fullDirectoryName1, user.password) + // + // await fdp.account.login(user.username, user.password) + // const fdpResponse = await fdp.directory.read(podName1, '/', true) + // expect(fdpResponse.directories).toHaveLength(1) + // const dir1 = fdpResponse.directories[0] + // expect(dir1.name).toEqual(directoryName1) + // + // await fairos.dirRmdir(podName1, fullDirectoryName1) + // const fdpResponse2 = await fdp.directory.read(podName1, '/', true) + // expect(fdpResponse2.directories).toHaveLength(0) + // + // // test mixed interaction (directory created from fairos and deleted with fdp) + // await expect(fdp.directory.delete(podName1, fullDirectoryName1)).rejects.toThrow( + // `Item "${fullDirectoryName1}" not found in the list of items`, + // ) + // const fdpResponse3 = await fdp.directory.read(podName1, '/', true) + // expect(fdpResponse3.directories).toHaveLength(0) + // const fairosDirs = await fairos.dirLs(podName1) + // expect(fairosDirs.data).toEqual({}) + // }) }) describe('File', () => { @@ -396,6 +403,9 @@ describe('Fair Data Protocol with FairOS-dfs', () => { }, ]) + const itemsList1 = await fdp.directory.read(podName1, '/') + expect(itemsList1.files).toHaveLength(1) + expect(itemsList1.files[0].name).toEqual(filenameBig) const file1 = wrapBytesWithHelpers(await fdp.file.downloadData(podName1, fullFilenameBigPath)) expect(file1.text()).toEqual(contentBig) @@ -440,43 +450,44 @@ describe('Fair Data Protocol with FairOS-dfs', () => { expect(dirs2).toBeUndefined() }) - it('should delete file in fairos and it will disappear in fdp', async () => { - const fairos = new FairOSApi() - const fdp = createFdp() - const user = generateUser() - const podName1 = generateRandomHexString() - const fileSizeBig = 1000015 - const contentBig = generateRandomHexString(fileSizeBig) - const contentBig2 = generateRandomHexString(fileSizeBig) - const filenameBig = generateRandomHexString() + '.txt' - const filenameBig2 = generateRandomHexString() + '.txt' - const fullFilenameBigPath = '/' + filenameBig - const fullFilenameBigPath2 = '/' + filenameBig2 - - await topUpAddress(user.address) - await fairos.register(user.username, user.password, user.mnemonic) - await fairos.podNew(podName1, user.password) - await fairos.fileUpload(podName1, '/', contentBig, filenameBig) - await fairos.fileUpload(podName1, '/', contentBig2, filenameBig2) - - await fdp.account.login(user.username, user.password) - const fdpResponse = await fdp.directory.read(podName1, '/', true) - expect(fdpResponse.files).toHaveLength(2) - expect(fdpResponse.files[0].name).toEqual(filenameBig) - expect(fdpResponse.files[1].name).toEqual(filenameBig2) - - await fairos.fileDelete(podName1, fullFilenameBigPath) - const fdpResponse2 = await fdp.directory.read(podName1, '/', true) - expect(fdpResponse2.files).toHaveLength(1) - expect(fdpResponse2.files[0].name).toEqual(filenameBig2) - - // test mixed interaction (file created from fairos and deleted with fdp) - await fdp.file.delete(podName1, fullFilenameBigPath2) - const fdpResponse3 = await fdp.directory.read(podName1, '/', true) - expect(fdpResponse3.files).toHaveLength(0) - const fairosDirs = await fairos.dirLs(podName1) - expect(fairosDirs.data).toEqual({}) - }) + // todo on the fairos side: AxiosError: Request failed with status code 500. Server response: {"message":"delete pod: could not delete file inode bd74c092eca12d33fa007e9bb7388ea5ff350b012924cd31ad4a99f0e84a1a6f96238607966946b2fbe90ff111561e33bbf279070761001bede787d6ab3570ae: failed to unpin reference : {\"code\":500,\"message\":\"unpin root hash: deletion of pin failed\"}\n\n"} + // it('should delete file in fairos and it will disappear in fdp', async () => { + // const fairos = new FairOSApi() + // const fdp = createFdp() + // const user = generateUser() + // const podName1 = generateRandomHexString() + // const fileSizeBig = 1000015 + // const contentBig = generateRandomHexString(fileSizeBig) + // const contentBig2 = generateRandomHexString(fileSizeBig) + // const filenameBig = generateRandomHexString() + '.txt' + // const filenameBig2 = generateRandomHexString() + '.txt' + // const fullFilenameBigPath = '/' + filenameBig + // const fullFilenameBigPath2 = '/' + filenameBig2 + // + // await topUpAddress(user.address) + // await fairos.register(user.username, user.password, user.mnemonic) + // await fairos.podNew(podName1, user.password) + // await fairos.fileUpload(podName1, '/', contentBig, filenameBig) + // await fairos.fileUpload(podName1, '/', contentBig2, filenameBig2) + // + // await fdp.account.login(user.username, user.password) + // const fdpResponse = await fdp.directory.read(podName1, '/', true) + // expect(fdpResponse.files).toHaveLength(2) + // expect(fdpResponse.files[0].name).toEqual(filenameBig) + // expect(fdpResponse.files[1].name).toEqual(filenameBig2) + // + // await fairos.fileDelete(podName1, fullFilenameBigPath) + // const fdpResponse2 = await fdp.directory.read(podName1, '/', true) + // expect(fdpResponse2.files).toHaveLength(1) + // expect(fdpResponse2.files[0].name).toEqual(filenameBig2) + // + // // test mixed interaction (file created from fairos and deleted with fdp) + // await fdp.file.delete(podName1, fullFilenameBigPath2) + // const fdpResponse3 = await fdp.directory.read(podName1, '/', true) + // expect(fdpResponse3.files).toHaveLength(0) + // const fairosDirs = await fairos.dirLs(podName1) + // expect(fairosDirs.data).toEqual({}) + // }) }) describe('Metadata', () => { @@ -539,9 +550,7 @@ describe('Fair Data Protocol with FairOS-dfs', () => { await fairos.fileUpload(podName1, '/', contentBig, filenameBig) const { podAddress, pod } = await getExtendedPodsListByAccountData(fdp.account, podName1) - const rawDirectoryMetadata = ( - await getRawMetadata(fdp.connection.bee, fullNewDirectory1, podAddress, pod.password) - ).metadata as RawDirectoryMetadata + const rawDirectoryMetadata = await getIndexFileContent(fdp.account, podName1, fullNewDirectory1) checkDirectoryMetadata(rawDirectoryMetadata, newDirectory1) const rawFileMetadata = (await getRawMetadata(fdp.connection.bee, fullFilenameBigPath, podAddress, pod.password)) @@ -550,9 +559,7 @@ describe('Fair Data Protocol with FairOS-dfs', () => { await fdp.directory.create(podName1, fullNewDirectory2) await fdp.file.uploadData(podName1, fullFilenameBigPath2, contentBig2) - const rawDirectoryMetadata1 = ( - await getRawMetadata(fdp.connection.bee, fullNewDirectory2, podAddress, pod.password) - ).metadata as RawDirectoryMetadata + const rawDirectoryMetadata1 = await getIndexFileContent(fdp.account, podName1, fullNewDirectory2) checkDirectoryMetadata(rawDirectoryMetadata1, newDirectory2) const rawFileMetadata1 = ( diff --git a/test/integration/node/batch-id/batch-id-only-for-write.spec.ts b/test/integration/node/batch-id/batch-id-only-for-write.spec.ts index 07692d34..6f3c41c4 100644 --- a/test/integration/node/batch-id/batch-id-only-for-write.spec.ts +++ b/test/integration/node/batch-id/batch-id-only-for-write.spec.ts @@ -25,7 +25,7 @@ it('Batch Id only for write operations', async () => { await expect(fdp.file.share(testPod, testFilePath)).rejects.toThrow(error) await expect(fdp.file.delete(testPod, testFilePath)).rejects.toThrow(error) - await expect(fdp.file.uploadDataBlock(testData, 0)).rejects.toThrow(error) + await expect(fdp.file.uploadDataBlock(testData, 0, testData.length)).rejects.toThrow(error) await expect(fdp.file.saveShared(testPod, testFilePath, testReference)).rejects.toThrow(error) await expect(fdp.file.uploadData(testPod, testFilePath, testData)).rejects.toThrow(error) diff --git a/test/integration/node/fdp-class.spec.ts b/test/integration/node/fdp-class.spec.ts index 31568c7b..a5d6289e 100644 --- a/test/integration/node/fdp-class.spec.ts +++ b/test/integration/node/fdp-class.spec.ts @@ -12,7 +12,6 @@ import { PodShareInfo, RawFileMetadata } from '../../../src/pod/types' import { FileShareInfo } from '../../../src/file/types' import { getFeedData } from '../../../src/feed/api' import * as feedApi from '../../../src/feed/api' -import { POD_TOPIC } from '../../../src/pod/personal-storage' import { decryptBytes } from '../../../src/utils/encryption' import { Wallet } from 'ethers' import { removeZeroFromHex } from '../../../src/account/utils' @@ -26,6 +25,7 @@ import { ETH_ADDR_HEX_LENGTH } from '../../../src/utils/type' import * as walletApi from '../../../src/utils/wallet' import { HIGHEST_LEVEL } from '../../../src/feed/lookup/epoch' import { getWalletByIndex } from '../../../src/utils/cache/wallet' +import { getPodV2Topic, POD_TOPIC_V2 } from '../../../src/pod/utils' jest.setTimeout(400000) describe('Fair Data Protocol class', () => { @@ -725,13 +725,15 @@ describe('Fair Data Protocol class', () => { // check pod metadata const pod1 = await fdp.personalStorage.create(pod) - const podData = await getFeedData(bee, POD_TOPIC, prepareEthAddress(user.address)) + const podData = await getFeedData(bee, getPodV2Topic(), prepareEthAddress(user.address)) const encryptedText1 = podData.data.chunkContent().text() const encryptedBytes1 = podData.data.chunkContent() // data decrypts with wallet for the pod. Data inside the pod will be encrypted with a password stored in the pod const decryptedText1 = bytesToString(decryptBytes(privateKey, encryptedBytes1)) expect(encryptedText1).not.toContain(pod) - expect(decryptedText1).toContain(pod) + expect(decryptedText1).toContain('version') + expect(decryptedText1).toContain('filePath') + expect(decryptedText1).toContain(POD_TOPIC_V2) // HDNode with index 1 is for first pod const node1 = await getWalletByIndex(seed, 1) const rootDirectoryData = await getFeedData(bee, '/', prepareEthAddress(node1.address)) @@ -811,15 +813,16 @@ describe('Fair Data Protocol class', () => { await fdpNoCache.personalStorage.create(pod1) // for the first feed write it should be the highest level expect(writeFeedDataSpy.mock.calls[0][5]?.level).toEqual(HIGHEST_LEVEL) - // getting info about pods from the network - expect(getFeedDataSpy).toBeCalledTimes(1) + // getting old V1 pods info + getting V2 pods info + getting V2 pods info for data uploading + 2 additional calls + expect(getFeedDataSpy).toBeCalledTimes(5) // calculating wallet by index for the pod expect(getWalletByIndexSpy).toBeCalledTimes(1) jest.clearAllMocks() await fdpNoCache.personalStorage.create(pod2) expect(writeFeedDataSpy.mock.calls[0][5]?.level).toEqual(HIGHEST_LEVEL - 1) - expect(getFeedDataSpy).toBeCalledTimes(1) + // get V1 pods info + V2 pods info + 2 additional calls + expect(getFeedDataSpy).toBeCalledTimes(4) expect(getWalletByIndexSpy).toBeCalledTimes(1) jest.clearAllMocks() @@ -831,17 +834,17 @@ describe('Fair Data Protocol class', () => { await fdpNoCache.directory.read(pod1, '/', true) // should not write any info expect(writeFeedDataSpy).toBeCalledTimes(0) - // getting info about pods from the network and getting root info of the pod - expect(getFeedDataSpy).toBeCalledTimes(2) - // calculating wallet by index for the pod - expect(getWalletByIndexSpy).toBeCalledTimes(1) + // getting info about pods from the network and getting root info of the pod + 1 additional call + expect(getFeedDataSpy).toBeCalledTimes(3) + // calculating wallet by index for the pod + 1 additional call + expect(getWalletByIndexSpy).toBeCalledTimes(2) jest.clearAllMocks() await fdpNoCache.file.uploadData(pod1, fullFilename, fileContent) // write file metadata + update root dir expect(writeFeedDataSpy).toBeCalledTimes(2) - // get pods info + check file exists + get parent metadata - expect(getFeedDataSpy).toBeCalledTimes(3) + // get pods info + check file exists + get parent metadata + 1 additional call + expect(getFeedDataSpy).toBeCalledTimes(4) // calc the pod wallet expect(getWalletByIndexSpy).toBeCalledTimes(1) @@ -849,8 +852,8 @@ describe('Fair Data Protocol class', () => { await fdpNoCache.file.delete(pod1, fullFilename) // update root dir metadata + write magic word instead of the file expect(writeFeedDataSpy).toBeCalledTimes(2) - // get pods info + check is file available + update root dir metadata - expect(getFeedDataSpy).toBeCalledTimes(3) + // get pods info + check is file available + update root dir metadata + 1 additional call + expect(getFeedDataSpy).toBeCalledTimes(4) // calc the pod wallet expect(getWalletByIndexSpy).toBeCalledTimes(1) @@ -858,8 +861,8 @@ describe('Fair Data Protocol class', () => { await fdpNoCache.personalStorage.delete(pod1) // writing info about pods to the network expect(writeFeedDataSpy).toBeCalledTimes(1) - // get pods info - expect(getFeedDataSpy).toBeCalledTimes(1) + // get V1 pods info + V2 pods info + expect(getFeedDataSpy).toBeCalledTimes(2) expect(getWalletByIndexSpy).toBeCalledTimes(0) jest.restoreAllMocks() @@ -889,15 +892,15 @@ describe('Fair Data Protocol class', () => { await fdpWithCache.personalStorage.create(pod1) // for the first feed write it should be the highest level expect(writeFeedDataSpy.mock.calls[0][5]?.level).toEqual(HIGHEST_LEVEL) - // getting info about pods from the network - expect(getFeedDataSpy).toBeCalledTimes(1) + // getting V1 pods info + V2 pods info + V2 pods info for data uploading + 2 additional calls + expect(getFeedDataSpy).toBeCalledTimes(5) // calculating wallet by index for the pod expect(getWalletByIndexSpy).toBeCalledTimes(1) jest.clearAllMocks() await fdpWithCache.personalStorage.create(pod2) - // data about pods shouldn't be retrieved - expect(getFeedDataSpy).toBeCalledTimes(0) + // V2 pods info for data uploading + 2 additional calls + expect(getFeedDataSpy).toBeCalledTimes(3) // should be calculated correct level without getting previous one from the network expect(writeFeedDataSpy.mock.calls[0][5]?.level).toEqual(HIGHEST_LEVEL - 1) // calc a wallet for the new pod @@ -921,8 +924,8 @@ describe('Fair Data Protocol class', () => { await fdpWithCache.file.uploadData(pod1, fullFilename, fileContent) // write file metadata + update root dir expect(writeFeedDataSpy).toBeCalledTimes(2) - // check file exists + get parent metadata. pods info is cached - expect(getFeedDataSpy).toBeCalledTimes(2) + // check file exists + get parent metadata. pods info is cached + 1 additional call + expect(getFeedDataSpy).toBeCalledTimes(3) // the pod wallet is cached expect(getWalletByIndexSpy).toBeCalledTimes(0) @@ -930,8 +933,8 @@ describe('Fair Data Protocol class', () => { await fdpWithCache.file.delete(pod1, fullFilename) // update root dir metadata + write magic word instead of the file expect(writeFeedDataSpy).toBeCalledTimes(2) - // update root dir metadata + check is file deleted. should not get pods info - expect(getFeedDataSpy).toBeCalledTimes(2) + // update root dir metadata + check is file deleted. should not get pods info + 1 additional call + expect(getFeedDataSpy).toBeCalledTimes(3) // the pod wallet is cached expect(getWalletByIndexSpy).toBeCalledTimes(0) @@ -939,8 +942,8 @@ describe('Fair Data Protocol class', () => { await fdpWithCache.personalStorage.delete(pod1) // writing info about pods to the network expect(writeFeedDataSpy).toBeCalledTimes(1) - // should not get pods info - expect(getFeedDataSpy).toBeCalledTimes(0) + // V2 pods info for data uploading (todo: can be reduced) + expect(getFeedDataSpy).toBeCalledTimes(1) expect(getWalletByIndexSpy).toBeCalledTimes(0) // recovering cache data diff --git a/test/integration/node/pod/pod-limit-error-message.spec.ts b/test/integration/node/pod/pod-limit-error-message.spec.ts deleted file mode 100644 index 430920c0..00000000 --- a/test/integration/node/pod/pod-limit-error-message.spec.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { createFdp, generateRandomHexString, generateUser } from '../../../utils' -import { MAX_POD_NAME_LENGTH } from '../../../../src/pod/utils' - -jest.setTimeout(400000) -it('Pod list size error message', async () => { - const fdp = createFdp() - generateUser(fdp) - - for (let i = 0; i < 25; i++) { - const longPodName = generateRandomHexString(MAX_POD_NAME_LENGTH) - - if (i === 24) { - await expect(fdp.personalStorage.create(longPodName)).rejects.toThrow('Exceeded pod list size by 46 bytes') - } else { - await fdp.personalStorage.create(longPodName) - } - } -}) diff --git a/test/integration/node/pod/pods-limitation-check.spec.ts b/test/integration/node/pod/pods-limitation-check.spec.ts new file mode 100644 index 00000000..62d6d584 --- /dev/null +++ b/test/integration/node/pod/pods-limitation-check.spec.ts @@ -0,0 +1,15 @@ +import { createFdp, generateRandomHexString, generateUser } from '../../../utils' +import { MAX_POD_NAME_LENGTH } from '../../../../src' +import { HIGHEST_LEVEL } from '../../../../src/feed/lookup/epoch' + +jest.setTimeout(400000) +it('Pods limitation check', async () => { + const fdp = createFdp() + generateUser(fdp) + + // todo should work with more updates than HIGHEST_LEVEL + for (let i = 0; i < HIGHEST_LEVEL; i++) { + const longPodName = generateRandomHexString(MAX_POD_NAME_LENGTH) + await fdp.personalStorage.create(longPodName) + } +}) diff --git a/test/integration/node/upload-by-index.spec.ts b/test/integration/node/upload-by-index.spec.ts index 6e5c3f14..79d78f8e 100644 --- a/test/integration/node/upload-by-index.spec.ts +++ b/test/integration/node/upload-by-index.spec.ts @@ -14,7 +14,7 @@ it('Upload by index', async () => { const blocks = [] for (let i = 0; i < blocksCount; i++) { const block = getDataBlock(content, blockSize, i) - blocks.push(await fdp.file.uploadDataBlock(block, i)) + blocks.push(await fdp.file.uploadDataBlock(block, i, block.length)) } generateUser(fdp) diff --git a/test/integration/node/upload-progress.spec.ts b/test/integration/node/upload-progress.spec.ts index 6df72550..5bba7c1c 100644 --- a/test/integration/node/upload-progress.spec.ts +++ b/test/integration/node/upload-progress.spec.ts @@ -1,7 +1,6 @@ import { createFdp, generateRandomHexString, generateUser } from '../../utils' import { wrapBytesWithHelpers } from '../../../src/utils/bytes' import { UploadProgressInfo } from '../../../src' -import { DEFAULT_UPLOAD_OPTIONS } from '../../../src/content-items/handler' jest.setTimeout(400000) it('Fair Data Protocol upload progress', async () => { @@ -9,7 +8,6 @@ it('Fair Data Protocol upload progress', async () => { generateUser(fdp) const pod = generateRandomHexString() const fileSizeBig = 5000005 - const blocksCount = Math.ceil(fileSizeBig / DEFAULT_UPLOAD_OPTIONS.blockSize!) const contentBig = generateRandomHexString(fileSizeBig) const filenameBig = generateRandomHexString() + '.txt' const fullFilenameBigPath = '/' + filenameBig @@ -31,8 +29,5 @@ it('Fair Data Protocol upload progress', async () => { expect(fileInfoBig.name).toEqual(filenameBig) expect(fileInfoBig.size).toEqual(fileSizeBig) - // multiply `blocksCount` by 2 because each block has two events. - // the 6 other events from `UploadProgressType` occur once each - const totalEvents = blocksCount * 2 + 6 - expect(callbackData.length).toEqual(totalEvents) + expect(callbackData.length).toEqual(16) }) diff --git a/test/migration/node/fdp-storage-v1-install.ts b/test/migration/node/fdp-storage-v1-install.ts new file mode 100644 index 00000000..db7c2ab7 --- /dev/null +++ b/test/migration/node/fdp-storage-v1-install.ts @@ -0,0 +1,31 @@ +import { access, readFile, writeFile } from 'fs/promises' +import { runCommand } from '../../utils/system' + +async function removePuppeteerDependency() { + const packageJsonPath = 'test/fdp-storage/package.json' + const packageJson = JSON.parse(await readFile(packageJsonPath, 'utf-8')) + + delete packageJson.devDependencies['@types/expect-puppeteer'] + delete packageJson.devDependencies['@types/jest-environment-puppeteer'] + delete packageJson.devDependencies['jest-puppeteer'] + delete packageJson.devDependencies['puppeteer'] + + await writeFile(packageJsonPath, JSON.stringify(packageJson)) +} + +export async function installFdpStorageV1() { + try { + await access('test/fdp-storage') + + return + // eslint-disable-next-line no-empty + } catch (error) {} + + await runCommand(`cd test && git clone -b test/v1-test-branch https://github.com/fairDataSociety/fdp-storage.git`) + + await removePuppeteerDependency() + + await runCommand('cd test/fdp-storage && npm install') + + await runCommand('cd test/fdp-storage && npm run compile:node && npm run compile:types') +} diff --git a/test/migration/node/migration.spec.ts b/test/migration/node/migration.spec.ts new file mode 100644 index 00000000..e0231847 --- /dev/null +++ b/test/migration/node/migration.spec.ts @@ -0,0 +1,101 @@ +import { batchId, beeUrl, createFdp, generateUser } from '../../utils' +import { wrapBytesWithHelpers } from '../../../src/utils/bytes' +import { installFdpStorageV1 } from './fdp-storage-v1-install' +import { runCommand } from '../../utils/system' +import { FdpStorage } from '../../../src' +import { PodsVersion, extractPodsV2, getExtendedPodsListByAccountData, getPodsData } from '../../../src/pod/utils' +import { prepareEthAddress, privateKeyToBytes } from '../../../src/utils/wallet' +import { readDirectory } from '../../../src/directory/handler' + +describe('Migration tests', () => { + let fdp: FdpStorage + let mnemonic: string + + beforeAll(async () => { + await installFdpStorageV1() + + fdp = createFdp() + const user = generateUser(fdp) + mnemonic = user.mnemonic + }) + + it('V1 pods, directories and files should be migrated to V2', async () => { + await runCommand(`node test/migration/node/prepare-data.ts "${mnemonic}" "${beeUrl()}" "${batchId()}"`) + + const pods = await fdp.personalStorage.list() + + expect(pods.pods.length).toEqual(2) + + const pod = pods.pods[0] + + const folder1 = await fdp.directory.read(pod.name, '/folder1') + + expect(folder1.directories.length).toEqual(1) + expect(folder1.directories[0].name).toEqual('folder2') + + expect(folder1.files.length).toEqual(1) + expect(folder1.files[0].name).toEqual('file2.txt') + + const folder2 = await fdp.directory.read(pod.name, '/folder1/folder2') + + expect(folder2.directories.length).toEqual(1) + expect(folder2.files.length).toEqual(0) + + const root = await fdp.directory.read(pod.name, '/') + + expect(root.directories.length).toEqual(1) + expect(root.directories[0].name).toEqual('folder1') + + expect(root.files.length).toEqual(1) + expect(root.files[0].name).toEqual('file1.txt') + + const file1 = await fdp.file.downloadData(pod.name, '/file1.txt') + + expect(wrapBytesWithHelpers(file1).text()).toEqual('nakamoto\n') + + let deepDir = await fdp.directory.read(pod.name, '/folder1/folder2/3/4/5') + + expect(deepDir.directories.length).toEqual(0) + expect(deepDir.files.length).toEqual(1) + expect(deepDir.files[0].name).toEqual('file2.txt') + + deepDir = await fdp.directory.read(pod.name, '/folder1/folder2/3') + + expect(deepDir.directories.length).toEqual(1) + expect(deepDir.files.length).toEqual(0) + }) + + it('Pods and directories should be stored in V2 structure', async () => { + const podData = await getPodsData(fdp.connection.bee, prepareEthAddress(fdp.account.wallet!.address)) + + expect(podData.podsVersion).toEqual(PodsVersion.V2) + + const pods = await extractPodsV2( + fdp.account, + podData.lookupAnswer.data.chunkContent(), + privateKeyToBytes(fdp.account.wallet!.privateKey), + ) + + expect(pods.pods.length).toEqual(2) + expect(pods.pods[0].name).toEqual('pod1') + expect(pods.pods[1].name).toEqual('pod2') + + const { podAddress, pod } = await getExtendedPodsListByAccountData(fdp.account, 'pod1') + + const root = await readDirectory(fdp.account, 'pod1', '/', podAddress, pod.password) + + expect(root.directories.length).toEqual(1) + expect(root.directories[0].name).toEqual('folder1') + + expect(root.files.length).toEqual(1) + expect(root.files[0].name).toEqual('file1.txt') + + const folder1 = await readDirectory(fdp.account, 'pod1', '/folder1', podAddress, pod.password) + + expect(folder1.directories.length).toEqual(1) + expect(folder1.directories[0].name).toEqual('folder2') + + expect(folder1.files.length).toEqual(1) + expect(folder1.files[0].name).toEqual('file2.txt') + }) +}) diff --git a/test/migration/node/prepare-data.ts b/test/migration/node/prepare-data.ts new file mode 100644 index 00000000..0f8b7373 --- /dev/null +++ b/test/migration/node/prepare-data.ts @@ -0,0 +1,44 @@ +/* eslint-disable no-console */ +/* eslint-disable @typescript-eslint/no-var-requires */ +const fdpV1 = require('../../fdp-storage/dist/index.js') +const path = require('path') +const { readFileSync } = require('fs') + +const filesPath = path.resolve(__dirname, '../../data-unit/directory-utils') + +async function run() { + const mnemonic = process.argv[2] + const beeUrl = process.argv[3] + const batchId = process.argv[4] + + const fdp = new fdpV1.FdpStorage(beeUrl, batchId) + + fdp.account.setAccountFromMnemonic(mnemonic) + + await fdp.personalStorage.create('pod1') + + await fdp.personalStorage.create('pod2') + + await fdp.file.uploadData('pod1', '/file1.txt', readFileSync(filesPath + '/file1.txt')) + + await fdp.directory.create('pod1', '/folder1') + + await fdp.file.uploadData('pod1', '/folder1/file2.txt', readFileSync(filesPath + '/file2.txt')) + + await fdp.directory.create('pod1', '/folder1/folder2') + await fdp.directory.create('pod1', '/folder1/folder2/3') + await fdp.directory.create('pod1', '/folder1/folder2/3/4') + await fdp.directory.create('pod1', '/folder1/folder2/3/4/5') + + await fdp.file.uploadData('pod1', '/folder1/folder2/3/4/5/file2.txt', readFileSync(filesPath + '/file2.txt')) +} + +run() + .then(() => { + console.log('Success') + process.exit(0) + }) + .catch(error => { + console.error(error) + process.exit(1) + }) diff --git a/test/unit/feed/api.spec.ts b/test/unit/feed/api.spec.ts new file mode 100644 index 00000000..ebc7c65f --- /dev/null +++ b/test/unit/feed/api.spec.ts @@ -0,0 +1,27 @@ +import { getFeedData, writeFeedData } from '../../../src/feed/api' +import { createFdp, generateUser } from '../../utils' +import { stringToBytes } from '../../../src/utils/bytes' +import { prepareEthAddress, preparePrivateKey } from '../../../src/utils/wallet' +import { decryptBytes } from '../../../src/utils/encryption' +import { bytesToHex } from '../../../src/utils/hex' +import { getNextEpoch } from '../../../src/feed/lookup/utils' + +jest.setTimeout(400000) +describe('feed/api', () => { + it('write-read check', async () => { + const fdp = createFdp() + generateUser(fdp) + + const wallet = fdp.account.wallet! + const podPassword = preparePrivateKey(wallet.privateKey) + const topic = '/' + const data = stringToBytes(JSON.stringify({ hello: 'world of bees' })) + await writeFeedData(fdp.connection, topic, data, wallet.privateKey, podPassword) + const feedData = await getFeedData(fdp.connection.bee, topic, prepareEthAddress(wallet.address)) + await writeFeedData(fdp.connection, topic, data, wallet.privateKey, podPassword, getNextEpoch(feedData.epoch)) + const feedData1 = await getFeedData(fdp.connection.bee, topic, prepareEthAddress(wallet.address)) + const result = decryptBytes(bytesToHex(podPassword), feedData1.data.chunkContent()) + + expect(result).toEqual(data) + }) +}) diff --git a/test/unit/feed/epoch.spec.ts b/test/unit/feed/epoch.spec.ts index d1a40abf..31207901 100644 --- a/test/unit/feed/epoch.spec.ts +++ b/test/unit/feed/epoch.spec.ts @@ -1,4 +1,5 @@ -import { Epoch, getBaseTime } from '../../../src/feed/lookup/epoch' +import { Epoch, getBaseTime, HIGHEST_LEVEL } from '../../../src/feed/lookup/epoch' +import { bytesToHex } from '../../../src/utils/hex' describe('feed/epoch', () => { it('getBaseTime', () => { @@ -101,4 +102,20 @@ describe('feed/epoch', () => { expect(result.time).toEqual(example.result.time) } }) + + it('getNextEpoch limits', () => { + const ids: { [key: string]: boolean } = {} + let epoch = new Epoch(HIGHEST_LEVEL, 1648635721) + // todo should work with more updates than HIGHEST_LEVEL + for (let i = 0; i <= HIGHEST_LEVEL; i++) { + epoch = epoch.getNextEpoch(epoch.time) + const id = bytesToHex(epoch.id()) as string + + if (ids[id]) { + throw new Error('epoch id already exists') + } else { + ids[id] = true + } + } + }) }) diff --git a/test/unit/file/handler.spec.ts b/test/unit/file/handler.spec.ts deleted file mode 100644 index 41e6367c..00000000 --- a/test/unit/file/handler.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { generateBlockName } from '../../../src/file/handler' - -describe('file/handler', () => { - it('generateBlockName', () => { - const examples = [ - { - data: 0, - result: 'block-00000', - }, - { - data: 1, - result: 'block-00001', - }, - { - data: 11, - result: 'block-00011', - }, - { - data: 11111, - result: 'block-11111', - }, - { - data: 99999, - result: 'block-99999', - }, - ] - - for (const example of examples) { - expect(generateBlockName(example.data)).toEqual(example.result) - } - }) -}) diff --git a/test/unit/utils/compression.spec.ts b/test/unit/utils/compression.spec.ts new file mode 100644 index 00000000..0466797e --- /dev/null +++ b/test/unit/utils/compression.spec.ts @@ -0,0 +1,24 @@ +import { compress, decompress } from '../../../src/utils/compression' +import { bytesToString, stringToBytes } from '../../../src/utils/bytes' +import { Utils } from '@ethersphere/bee-js' + +describe('utils/compression', () => { + it('should compress and decompress', () => { + const data = 'Hello world' + const compressed = compress(stringToBytes(data)) + expect(compressed).not.toEqual(data) + + const decompressed = decompress(compressed) + expect(bytesToString(decompressed)).toEqual(data) + }) + + it('should decompress fairos compression', () => { + const data = 'Hello world' + const fairosCompressed = Utils.hexToBytes( + '1f8b080000096e8800ff000b00f4ff48656c6c6f20776f726c64000000ffff0300529ed68b0b000000', + ) + + const decompressed = decompress(fairosCompressed) + expect(bytesToString(decompressed)).toEqual(data) + }) +}) diff --git a/test/utils/system.ts b/test/utils/system.ts new file mode 100644 index 00000000..381d314d --- /dev/null +++ b/test/utils/system.ts @@ -0,0 +1,13 @@ +import { exec } from 'child_process' + +export async function runCommand(command: string): Promise { + return new Promise((resolve, reject) => { + exec(command, (error, stdout) => { + if (error) { + return reject(error) + } + + resolve(stdout) + }) + }) +} diff --git a/tsconfig.test.json b/tsconfig.test.json index 04dc2493..7b852143 100644 --- a/tsconfig.test.json +++ b/tsconfig.test.json @@ -5,6 +5,8 @@ "test", "jest.config.ts", ".eslintrc.js", + "jest-puppeteer.config.js", + "puppeteer.config.js", ], "compilerOptions": { "noEmit": true,