From 97e046dfd2be62870b0ff4024aa17273340a2e2b Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 14:12:14 -0500 Subject: [PATCH 001/101] run node tests in github actions --- .github/workflows/node_tests.yml | 41 ++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .github/workflows/node_tests.yml diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml new file mode 100644 index 000000000..0fb7d72f7 --- /dev/null +++ b/.github/workflows/node_tests.yml @@ -0,0 +1,41 @@ +name: Node Tests + +on: + push: + branches: [master] + pull_request: + branches: [master] + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: [ubuntu-latest, macos-latest, windows-latest] + node: [14] + + runs-on: ${{ matrix.platform }} + + steps: + - uses: actions/checkout@v2 + - name: Set git name/email + run: | + git config --global user.email "bids.maintenance@gmail.com" + git config --global user.name "bids-maintenance" + - name: Set up Python ${{ matrix.python-version }} + run: npm install -g npm@^7 + run: npm install + - name: Eslint + run: npm run lint + -name: Get bids-examples data + run: git submodule update --init + - name: Jest tests + run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 + - name: Upload to codecov + run: npm run codecov + - name: Smoke tests + run: | + bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders + bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders --json + + From 047005556c28caec2103087a46838d7da59c9cfd Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 14:15:15 -0500 Subject: [PATCH 002/101] fix yml syntax error --- .github/workflows/node_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index 0fb7d72f7..cd59b471f 100644 --- a/.github/workflows/node_tests.yml +++ 
b/.github/workflows/node_tests.yml @@ -27,7 +27,7 @@ jobs: run: npm install - name: Eslint run: npm run lint - -name: Get bids-examples data + - name: Get bids-examples data run: git submodule update --init - name: Jest tests run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 From 9c4a1a7583562a0b67740b4f7b63bc9597bc99c1 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 14:20:20 -0500 Subject: [PATCH 003/101] fix yml syntax error --- .github/workflows/node_tests.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index cd59b471f..f4b418fdb 100644 --- a/.github/workflows/node_tests.yml +++ b/.github/workflows/node_tests.yml @@ -23,8 +23,9 @@ jobs: git config --global user.email "bids.maintenance@gmail.com" git config --global user.name "bids-maintenance" - name: Set up Python ${{ matrix.python-version }} - run: npm install -g npm@^7 - run: npm install + run: | + npm install -g npm@^7 + npm install - name: Eslint run: npm run lint - name: Get bids-examples data From 3bcc8be9480f725a33c25dbc3dd81745fcec8278 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 14:26:03 -0500 Subject: [PATCH 004/101] use node action to actually setup node --- .github/workflows/node_tests.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index f4b418fdb..fe524c5bb 100644 --- a/.github/workflows/node_tests.yml +++ b/.github/workflows/node_tests.yml @@ -18,11 +18,15 @@ jobs: steps: - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 + with: + node-version: '14' + cache: 'npm' - name: Set git name/email run: | git config --global user.email "bids.maintenance@gmail.com" git config --global user.name "bids-maintenance" - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Node run: | npm install -g npm@^7 npm install From 
77f1cea677d8509532a0d5fa6fc2df476dd30c9a Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 14:34:10 -0500 Subject: [PATCH 005/101] try __dirname based join to find testdata instead of relying on process.cwd --- bids-validator/tests/cli.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/tests/cli.spec.js b/bids-validator/tests/cli.spec.js index ae5735eee..f7f2de177 100644 --- a/bids-validator/tests/cli.spec.js +++ b/bids-validator/tests/cli.spec.js @@ -2,7 +2,7 @@ import cli from '../cli' import path from 'path' const dir = process.cwd() -const data_dir = path.join(dir, 'bids-validator', 'tests', 'data') +const data_dir = path.join(__dirname, 'data') const data_with_errors = path.join(data_dir, 'empty_files') const data_without_errors = path.join(data_dir, 'valid_dataset') From 3e0a391b8f7fc980299cdabbaafe1f8e295c0000 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 14:36:22 -0500 Subject: [PATCH 006/101] shut eslint up --- bids-validator/tests/cli.spec.js | 1 - 1 file changed, 1 deletion(-) diff --git a/bids-validator/tests/cli.spec.js b/bids-validator/tests/cli.spec.js index f7f2de177..93d225cff 100644 --- a/bids-validator/tests/cli.spec.js +++ b/bids-validator/tests/cli.spec.js @@ -1,7 +1,6 @@ import cli from '../cli' import path from 'path' -const dir = process.cwd() const data_dir = path.join(__dirname, 'data') const data_with_errors = path.join(data_dir, 'empty_files') const data_without_errors = path.join(data_dir, 'valid_dataset') From 677c9f9700d6fb0fb969a24a22da987819e36953 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 15:18:08 -0500 Subject: [PATCH 007/101] Revert "shut eslint up" This reverts commit 3e0a391b8f7fc980299cdabbaafe1f8e295c0000. 
--- bids-validator/tests/cli.spec.js | 1 + 1 file changed, 1 insertion(+) diff --git a/bids-validator/tests/cli.spec.js b/bids-validator/tests/cli.spec.js index 93d225cff..f7f2de177 100644 --- a/bids-validator/tests/cli.spec.js +++ b/bids-validator/tests/cli.spec.js @@ -1,6 +1,7 @@ import cli from '../cli' import path from 'path' +const dir = process.cwd() const data_dir = path.join(__dirname, 'data') const data_with_errors = path.join(data_dir, 'empty_files') const data_without_errors = path.join(data_dir, 'valid_dataset') From 21fde2d274efed258d8566b139611884fff35c0f Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 26 Oct 2021 15:18:15 -0500 Subject: [PATCH 008/101] Revert "try __dirname based join to find testdata instead of relying on process.cwd" This reverts commit 77f1cea677d8509532a0d5fa6fc2df476dd30c9a. --- bids-validator/tests/cli.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/tests/cli.spec.js b/bids-validator/tests/cli.spec.js index f7f2de177..ae5735eee 100644 --- a/bids-validator/tests/cli.spec.js +++ b/bids-validator/tests/cli.spec.js @@ -2,7 +2,7 @@ import cli from '../cli' import path from 'path' const dir = process.cwd() -const data_dir = path.join(__dirname, 'data') +const data_dir = path.join(dir, 'bids-validator', 'tests', 'data') const data_with_errors = path.join(data_dir, 'empty_files') const data_without_errors = path.join(data_dir, 'valid_dataset') From 0a486229147ba9591aeecb5a7fb4bc43a1fbee04 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 13:38:29 -0600 Subject: [PATCH 009/101] add --colors to jest call --- .github/workflows/node_tests.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index fe524c5bb..6b4e310a0 100644 --- a/.github/workflows/node_tests.yml +++ b/.github/workflows/node_tests.yml @@ -20,8 +20,8 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - 
node-version: '14' - cache: 'npm' + node-version: "14" + cache: "npm" - name: Set git name/email run: | git config --global user.email "bids.maintenance@gmail.com" @@ -35,12 +35,10 @@ jobs: - name: Get bids-examples data run: git submodule update --init - name: Jest tests - run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 + run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 --colors - name: Upload to codecov run: npm run codecov - name: Smoke tests run: | bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders --json - - From 56614cd14f52d97f1e58757b132c3428637cc588 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 13:44:52 -0600 Subject: [PATCH 010/101] Revert "add --colors to jest call" This reverts commit 0a486229147ba9591aeecb5a7fb4bc43a1fbee04. --- .github/workflows/node_tests.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index 6b4e310a0..fe524c5bb 100644 --- a/.github/workflows/node_tests.yml +++ b/.github/workflows/node_tests.yml @@ -20,8 +20,8 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: "14" - cache: "npm" + node-version: '14' + cache: 'npm' - name: Set git name/email run: | git config --global user.email "bids.maintenance@gmail.com" @@ -35,10 +35,12 @@ jobs: - name: Get bids-examples data run: git submodule update --init - name: Jest tests - run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 --colors + run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 - name: Upload to codecov run: npm run codecov - name: Smoke tests run: | bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders --json + + From 
50a11d9b9bd577eee17f5c7a6e1068941bc67cb7 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 13:47:26 -0600 Subject: [PATCH 011/101] Revert "Revert "add --colors to jest call"" This reverts commit 56614cd14f52d97f1e58757b132c3428637cc588. --- .github/workflows/node_tests.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index fe524c5bb..6b4e310a0 100644 --- a/.github/workflows/node_tests.yml +++ b/.github/workflows/node_tests.yml @@ -20,8 +20,8 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: '14' - cache: 'npm' + node-version: "14" + cache: "npm" - name: Set git name/email run: | git config --global user.email "bids.maintenance@gmail.com" @@ -35,12 +35,10 @@ jobs: - name: Get bids-examples data run: git submodule update --init - name: Jest tests - run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 + run: npm run coverage -- --maxWorkers=2 --testTimeout=10000 --colors - name: Upload to codecov run: npm run codecov - name: Smoke tests run: | bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders bids-validator/bin/bids-validator bids-validator/tests/data/valid_headers/ --ignoreNiftiHeaders --json - - From 24150f4e29d27a93cebfb00a50c4d1218002c109 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 14:27:11 -0600 Subject: [PATCH 012/101] will --colors work for windows gitlab? 
--- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 130a46396..d9bb29d8d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -21,4 +21,4 @@ tests: - git checkout $CI_COMMIT_REF_NAME - git submodule update --init --depth 1 - npm install - - npm run test -- --testTimeout 30000 + - npm run test -- --testTimeout 30000 --colors From 59a93d8c5b9ce662bacfcb3ace1ddb6ca32761f3 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 14:56:05 -0600 Subject: [PATCH 013/101] Revert "will --colors work for windows gitlab?" This reverts commit 24150f4e29d27a93cebfb00a50c4d1218002c109. --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d9bb29d8d..130a46396 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -21,4 +21,4 @@ tests: - git checkout $CI_COMMIT_REF_NAME - git submodule update --init --depth 1 - npm install - - npm run test -- --testTimeout 30000 --colors + - npm run test -- --testTimeout 30000 From 93bcebb30ba7c7f1e0fbc7a41fd2dfbcaa1cedbb Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 15:15:09 -0600 Subject: [PATCH 014/101] putting command after ./ in package.json lint call might allow it to play nicer with windows. 
--- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f4932c61f..b1cbf8822 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "bids-validator-web" ], "scripts": { - "lint": "./node_modules/eslint/bin/eslint.js ./bids-validator/**/*.js", + "lint": "./\"node_modules/eslint/bin/eslint.js\" ./bids-validator/**/*.js", "coverage": "./node_modules/.bin/jest --coverage", "codecov": "./node_modules/.bin/codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", From b512edcc1a1a5331402adccb1e3e35962d0f3bfa Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 15:25:32 -0600 Subject: [PATCH 015/101] Revert "putting command after ./ in package.json lint call might allow it to play nicer with windows." This reverts commit 93bcebb30ba7c7f1e0fbc7a41fd2dfbcaa1cedbb. --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b1cbf8822..f4932c61f 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "bids-validator-web" ], "scripts": { - "lint": "./\"node_modules/eslint/bin/eslint.js\" ./bids-validator/**/*.js", + "lint": "./node_modules/eslint/bin/eslint.js ./bids-validator/**/*.js", "coverage": "./node_modules/.bin/jest --coverage", "codecov": "./node_modules/.bin/codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", From 703f336976374eb6895335a386f8e87baf0806be Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 15:31:19 -0600 Subject: [PATCH 016/101] remove relative paths from npm scripts --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index f4932c61f..ef8a4c015 100644 --- a/package.json +++ b/package.json @@ -5,9 +5,9 @@ "bids-validator-web" ], "scripts": { - "lint": "./node_modules/eslint/bin/eslint.js ./bids-validator/**/*.js", - "coverage": "./node_modules/.bin/jest --coverage", - "codecov": 
"./node_modules/.bin/codecov", + "lint": "eslint ./bids-validator/**/*.js", + "coverage": "jest --coverage", + "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", "postversion": "lerna version prerelease --preid dev --ignore-scripts --yes --force-publish=*", "web-dev": "cd bids-validator-web && npm run dev", From a8add554364166c2619f628fe833cb41aba98a73 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 15:56:50 -0600 Subject: [PATCH 017/101] add eslint ignore and update eslint call to use config file --- .eslintignore | 2 ++ package.json | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 .eslintignore diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 000000000..b94707787 --- /dev/null +++ b/.eslintignore @@ -0,0 +1,2 @@ +node_modules/ +dist/ diff --git a/package.json b/package.json index ef8a4c015..724a02f69 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "bids-validator-web" ], "scripts": { - "lint": "eslint ./bids-validator/**/*.js", + "lint": "eslint -c bids-validator/.eslintrc.json bids-validator/", "coverage": "jest --coverage", "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", From bb83792dc8cd67e66911cf99fe5a6a5c06f0c0f2 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 16:33:27 -0600 Subject: [PATCH 018/101] eslint/prettier fixes --- bids-validator/cli.js | 6 +- .../utils/__tests__/filenamesOnly.spec.js | 1 + .../utils/__tests__/gitTreeMode.spec.js | 59 ++--- bids-validator/utils/__tests__/unit.spec.js | 15 +- .../files/__tests__/readDir-examples.spec.js | 2 +- .../utils/files/generateMergedSidecarDict.js | 4 +- bids-validator/utils/files/readDir.js | 14 +- bids-validator/utils/files/readFile.js | 4 +- bids-validator/utils/issues/index.js | 3 + .../__tests__/collectSubjectMetadata.spec.js | 12 +- .../utils/summary/collectSessions.js | 2 +- .../utils/summary/collectSubjects.js | 2 +- 
.../__tests__/checkDatasetDescription.spec.js | 15 +- bids-validator/validators/bids/start.js | 2 +- bids-validator/validators/nifti/nii.js | 4 +- bids-validator/validators/tsv/checkTypeCol.js | 128 ++++++----- bids-validator/validators/tsv/index.js | 2 +- bids-validator/validators/tsv/tsv.js | 17 +- .../validators/tsv/validateContRecordings.js | 6 +- .../validators/tsv/validateTsvColumns.js | 205 ++++++++++-------- 20 files changed, 286 insertions(+), 217 deletions(-) diff --git a/bids-validator/cli.js b/bids-validator/cli.js index 0cd53ef95..74f6ced0a 100644 --- a/bids-validator/cli.js +++ b/bids-validator/cli.js @@ -8,7 +8,7 @@ import colors from 'colors/safe' import fs from 'fs' import { filenamesOnly } from './utils/filenamesOnly.js' -const errorToString = (err) => { +const errorToString = err => { if (err instanceof Error) return err.stack else if (typeof err === 'object') return JSON.parse(err) else return err @@ -39,7 +39,7 @@ export function cli(argumentOverride) { const argv = parseOptions(argumentOverride) const dir = argv._[0] const options = argv - process.on('unhandledRejection', (err) => { + process.on('unhandledRejection', err => { console.log( format.unexpectedError( // eslint-disable-next-line @@ -61,7 +61,7 @@ export function cli(argumentOverride) { reject(2) } - validate.BIDS(dir, options, function (issues, summary) { + validate.BIDS(dir, options, function(issues, summary) { function resolveOrReject() { if ( issues === 'Invalid' || diff --git a/bids-validator/utils/__tests__/filenamesOnly.spec.js b/bids-validator/utils/__tests__/filenamesOnly.spec.js index 79ca47454..63e37200b 100644 --- a/bids-validator/utils/__tests__/filenamesOnly.spec.js +++ b/bids-validator/utils/__tests__/filenamesOnly.spec.js @@ -2,6 +2,7 @@ import { validateFilenames } from '../filenamesOnly.js' describe('test filenames mode', () => { beforeEach(() => { + // eslint-disable-next-line console.log = jest.fn() }) it('throws an error when obviously non-BIDS input', async () 
=> { diff --git a/bids-validator/utils/__tests__/gitTreeMode.spec.js b/bids-validator/utils/__tests__/gitTreeMode.spec.js index 81248ad68..2e1bd7ccc 100644 --- a/bids-validator/utils/__tests__/gitTreeMode.spec.js +++ b/bids-validator/utils/__tests__/gitTreeMode.spec.js @@ -87,36 +87,41 @@ describe('gitTreeMode functions', () => { size: 1, }, ] - const expected = [ - { - path: '/path/to/dataset/path/to/a', - size: 100, - relativePath: '/path/to/a', - name: 'a', - }, - { - path: '/path/to/dataset/path/to/b', - size: 99, - relativePath: '/path/to/b', - name: 'b', - }, - { - path: '/path/to/dataset/path/to/c', - size: 98, - relativePath: '/path/to/c', - name: 'c', - }, - { - path: '/path/to/dataset/path/to/d', - size: 1, - relativePath: '/path/to/d', - name: 'd', - }, - ] + /* Not currently in use. + const expected = [ + { + path: '/path/to/dataset/path/to/a', + size: 100, + relativePath: '/path/to/a', + name: 'a', + }, + { + path: '/path/to/dataset/path/to/b', + size: 99, + relativePath: '/path/to/b', + name: 'b', + }, + { + path: '/path/to/dataset/path/to/c', + size: 98, + relativePath: '/path/to/c', + name: 'c', + }, + { + path: '/path/to/dataset/path/to/d', + size: 1, + relativePath: '/path/to/d', + name: 'd', + }, + ] + */ const output = processFiles('/path/to/dataset', ig, filesA, filesB) const fileNames = output.map(file => file.name) assert(!fileNames.includes('.DS_Store'), 'filters out ignored files') - assert(!fileNames.includes('derivative_file'), 'filters out ignored directories',) + assert( + !fileNames.includes('derivative_file'), + 'filters out ignored directories', + ) assert.deepEqual(fileNames, ['a', 'b', 'c', 'd'], 'aggregates files') assert.isString(output[0].relativePath, 'adds relativePath to files') assert.isString(output[1].relativePath, 'adds name to files') diff --git a/bids-validator/utils/__tests__/unit.spec.js b/bids-validator/utils/__tests__/unit.spec.js index 9dc116706..9ded667ce 100644 --- a/bids-validator/utils/__tests__/unit.spec.js 
+++ b/bids-validator/utils/__tests__/unit.spec.js @@ -9,7 +9,11 @@ describe('unit validator', () => { const goodOutput = unit.validate(validRoot) expect(goodOutput.isValid).toBe(true) }) - const invalidRoots = ['definitielynotavalidroot', `%/${validRoot}`, `n/a*${validRoot}`] + const invalidRoots = [ + 'definitielynotavalidroot', + `%/${validRoot}`, + `n/a*${validRoot}`, + ] invalidRoots.forEach(invalidRoot => { const badOutput = unit.validate(invalidRoot) expect(badOutput.isValid).toBe(false) @@ -25,7 +29,14 @@ describe('unit validator', () => { expect(badOutput.isValid).toBe(false) }) - const validExponents = ['^2', '^543', '¹²³', ...unit.superscriptNumbers.slice(0, 3), '^-2', '⁻³'] + const validExponents = [ + '^2', + '^543', + '¹²³', + ...unit.superscriptNumbers.slice(0, 3), + '^-2', + '⁻³', + ] it('handles simple units with exponents', () => { validExponents.forEach(exp => { const goodOutput = unit.validate(validRoot + exp) diff --git a/bids-validator/utils/files/__tests__/readDir-examples.spec.js b/bids-validator/utils/files/__tests__/readDir-examples.spec.js index f677a2e8d..10f9bde29 100644 --- a/bids-validator/utils/files/__tests__/readDir-examples.spec.js +++ b/bids-validator/utils/files/__tests__/readDir-examples.spec.js @@ -12,7 +12,7 @@ describe('readDir.js - examples integration', () => { 'sub-02_task-mixedeventrelatedprobe_run-01_bold.nii.gz', ) expect(filenames[200]).toBe( - 'sub-14_task-probabilisticclassification_run-02_events.tsv' + 'sub-14_task-probabilisticclassification_run-02_events.tsv', ) done() }) diff --git a/bids-validator/utils/files/generateMergedSidecarDict.js b/bids-validator/utils/files/generateMergedSidecarDict.js index ad1cb5f67..bc68746e2 100644 --- a/bids-validator/utils/files/generateMergedSidecarDict.js +++ b/bids-validator/utils/files/generateMergedSidecarDict.js @@ -13,7 +13,9 @@ function generateMergedSidecarDict(potentialSidecars, jsonContents) { const jsonObject = jsonContents[sidecarName] if (jsonObject) { for (var key 
in jsonObject) { - mergedDictionary[key] = jsonObject[key] + if (jsonObject.hasOwnProperty(key)) { + mergedDictionary[key] = jsonObject[key] + } } } else if (jsonObject === null) { mergedDictionary.invalid = true diff --git a/bids-validator/utils/files/readDir.js b/bids-validator/utils/files/readDir.js index 87f54b822..6321ef07b 100644 --- a/bids-validator/utils/files/readDir.js +++ b/bids-validator/utils/files/readDir.js @@ -114,7 +114,7 @@ async function preprocessNode(dir, ig, options) { * @returns {string[]} */ const getGitLsTree = (cwd, gitRef) => - new Promise((resolve, reject) => { + new Promise(resolve => { let output = '' const gitProcess = child_proccess.spawn( 'git', @@ -181,10 +181,14 @@ const readLsTreeLines = gitTreeLines => const getGitCatFile = (cwd, input) => new Promise(resolve => { let output = '' - const gitProcess = spawn('git', ['cat-file', '--batch', '--buffer'], { - cwd, - encoding: 'utf-8', - }) + const gitProcess = child_proccess.spawn( + 'git', + ['cat-file', '--batch', '--buffer'], + { + cwd, + encoding: 'utf-8', + }, + ) // pass in symlink objects gitProcess.stdin.write(input) diff --git a/bids-validator/utils/files/readFile.js b/bids-validator/utils/files/readFile.js index b0efb9bff..823299b21 100644 --- a/bids-validator/utils/files/readFile.js +++ b/bids-validator/utils/files/readFile.js @@ -39,12 +39,12 @@ const checkEncoding = (file, data, cb) => { function readFile(file, annexed, dir) { return new Promise((resolve, reject) => { if (isNode) { - testFile(file, annexed, dir, function (issue, stats, remoteBuffer) { + testFile(file, annexed, dir, function(issue, stats, remoteBuffer) { if (issue) { return reject(issue) } if (!remoteBuffer) { - fs.readFile(file.path, function (err, data) { + fs.readFile(file.path, function(err, data) { if (err) { return reject(err) } diff --git a/bids-validator/utils/issues/index.js b/bids-validator/utils/issues/index.js index 326982de4..273f9a2b2 100644 --- a/bids-validator/utils/issues/index.js +++ 
b/bids-validator/utils/issues/index.js @@ -86,6 +86,9 @@ var issues = { // organize by severity for (const codePropertyName in categorized) { + if (!categorized.hasOwnProperty(codePropertyName)) { + continue + } // Properties are always strings but error codes are always integers const code = parseInt(codePropertyName) issue = categorized[code] diff --git a/bids-validator/utils/summary/__tests__/collectSubjectMetadata.spec.js b/bids-validator/utils/summary/__tests__/collectSubjectMetadata.spec.js index 0f13a7895..229bc4cf2 100644 --- a/bids-validator/utils/summary/__tests__/collectSubjectMetadata.spec.js +++ b/bids-validator/utils/summary/__tests__/collectSubjectMetadata.spec.js @@ -4,10 +4,12 @@ const CRLFParticipantsTsv = 'participant_id\tsex\tage\r\nsub-01\tM\t25\r\n' describe('collectSubjectMetadata()', () => { it('handles Windows newline characters in column row', () => { - expect(collectSubjectMetadata(CRLFParticipantsTsv)).toEqual([{ - age: 25, - participantId: '01', - sex: 'M', - }]) + expect(collectSubjectMetadata(CRLFParticipantsTsv)).toEqual([ + { + age: 25, + participantId: '01', + sex: 'M', + }, + ]) }) }) diff --git a/bids-validator/utils/summary/collectSessions.js b/bids-validator/utils/summary/collectSessions.js index 0e95ce0b1..73c6154f0 100644 --- a/bids-validator/utils/summary/collectSessions.js +++ b/bids-validator/utils/summary/collectSessions.js @@ -1,6 +1,6 @@ import type from '../type' -const collectSessions = (fileList, options) => { +const collectSessions = fileList => { const sessions = [] Object.keys(fileList).forEach(key => { const file = fileList[key] diff --git a/bids-validator/utils/summary/collectSubjects.js b/bids-validator/utils/summary/collectSubjects.js index 327946e15..05ea1850c 100644 --- a/bids-validator/utils/summary/collectSubjects.js +++ b/bids-validator/utils/summary/collectSubjects.js @@ -1,6 +1,6 @@ import type from '../type' -const collectSubjects = (fileList, options) => { +const collectSubjects = fileList => { const 
subjects = [] const fileKeys = Object.keys(fileList) fileKeys.forEach(key => { diff --git a/bids-validator/validators/bids/__tests__/checkDatasetDescription.spec.js b/bids-validator/validators/bids/__tests__/checkDatasetDescription.spec.js index 970b7538d..b3d576cc2 100644 --- a/bids-validator/validators/bids/__tests__/checkDatasetDescription.spec.js +++ b/bids-validator/validators/bids/__tests__/checkDatasetDescription.spec.js @@ -60,9 +60,9 @@ describe('checkDatasetDescription', () => { }) describe('checkGeneticDatabaseField', () => { it('returns code 128 when there is no Genetics.Dataset with a genetic_info.json present', () => { - const invalidJsonContentsDict = { - '/dataset_description.json': { }, - '/genetic_info.json': { } + const invalidJsonContentsDict = { + '/dataset_description.json': {}, + '/genetic_info.json': {}, } let issues = checkDatasetDescription(invalidJsonContentsDict) assert( @@ -71,19 +71,18 @@ describe('checkDatasetDescription', () => { ) }) it('does not return code 128 when GeneticDataset field and genetic_info.json present', () => { - const validJsonContentsDict = { + const validJsonContentsDict = { '/dataset_description.json': { Authors: ['Benny', 'the Jets'], - Genetics: {Dataset: 'GeneticGeneticDataset'}, + Genetics: { Dataset: 'GeneticGeneticDataset' }, }, - '/genetic_info.json': { } + '/genetic_info.json': {}, } let issues = checkDatasetDescription(validJsonContentsDict) assert( issues.findIndex(issue => issue.code === 128) === -1, - 'issues does not include a code 128' + 'issues does not include a code 128', ) }) - }) }) diff --git a/bids-validator/validators/bids/start.js b/bids-validator/validators/bids/start.js index 9b81f4950..ece8ddce7 100644 --- a/bids-validator/validators/bids/start.js +++ b/bids-validator/validators/bids/start.js @@ -26,7 +26,7 @@ const start = (dir, options, callback) => { console.log(`bids-specification@${options.schema}`) } - utils.options.parse(dir, options, async function (issues, options) { + 
utils.options.parse(dir, options, async function(issues, options) { if (issues && issues.length > 0) { // option parsing issues callback({ config: issues }) diff --git a/bids-validator/validators/nifti/nii.js b/bids-validator/validators/nifti/nii.js index ae2bd2997..67e61b139 100644 --- a/bids-validator/validators/nifti/nii.js +++ b/bids-validator/validators/nifti/nii.js @@ -1342,7 +1342,7 @@ function checkIfIntendedExists(intendedForFile, fileList, issues, file) { * */ -function checkIfSeparateM0scanExists(m0scanFile, fileList, issues, file) { +function checkIfSeparateM0scanExists(m0scanFile, fileList) { let rule = m0scanFile.replace('_m0scan.nii', '').replace('.gz', '') let m0scanFile_nii = m0scanFile.replace('.nii.gz', '.nii') let m0scanFile_niigz = m0scanFile @@ -1364,7 +1364,7 @@ function checkIfSeparateM0scanExists(m0scanFile, fileList, issues, file) { } function matchRule_m0scan(str, rule) { - var escapeRegex = str => str.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1') + var escapeRegex = str => str.replace(/([.*+?^=!:${}()|[]\/\\])/g, '\\$1') return new RegExp( rule .split('*') diff --git a/bids-validator/validators/tsv/checkTypeCol.js b/bids-validator/validators/tsv/checkTypeCol.js index f6372ae17..c5e2891f3 100644 --- a/bids-validator/validators/tsv/checkTypeCol.js +++ b/bids-validator/validators/tsv/checkTypeCol.js @@ -1,13 +1,42 @@ const Issue = require('../../utils').issues.Issue // allowable 'type' values from the BIDS specification -const allowedMEEGTypes = ['EEG', 'ECOG', 'SEEG', 'DBS', 'PD', 'OTHER', // (i)EEG - 'EOG', 'ECG', 'EMG', 'EYEGAZE', 'GSR', 'HEOG', 'MISC', 'PUPIL', 'RESP', 'TEMP', 'VEOG', 'PPG', // physio - 'AUDIO', 'REF', 'SYSCLOCK', 'TRIG', 'ADC', 'DAC', // system - 'MEGMAG', 'MEGGRADAXIAL', 'MEGGRADPLANAR', 'MEGREFMAG', 'MEGREFGRADAXIAL', 'MEGREFGRADPLANAR', 'MEGOTHER', 'HLU', 'FITERR' // MEG +const allowedMEEGTypes = [ + 'EEG', + 'ECOG', + 'SEEG', + 'DBS', + 'PD', + 'OTHER', // (i)EEG + 'EOG', + 'ECG', + 'EMG', + 'EYEGAZE', + 'GSR', + 
'HEOG', + 'MISC', + 'PUPIL', + 'RESP', + 'TEMP', + 'VEOG', + 'PPG', // physio + 'AUDIO', + 'REF', + 'SYSCLOCK', + 'TRIG', + 'ADC', + 'DAC', // system + 'MEGMAG', + 'MEGGRADAXIAL', + 'MEGGRADPLANAR', + 'MEGREFMAG', + 'MEGREFGRADAXIAL', + 'MEGREFGRADPLANAR', + 'MEGOTHER', + 'HLU', + 'FITERR', // MEG ] - /** * Checks type column in an ephys _channels.tsv file to * ensure its values are only in an acceptable set of values and fires off a @@ -19,54 +48,53 @@ const allowedMEEGTypes = ['EEG', 'ECOG', 'SEEG', 'DBS', 'PD', 'OTHER', // (i)EE * found. * @returns {null} Results of this function are stored in issues. */ -const checkTypeCol = function (rows, file, issues) { - const header = rows[0] - const typeColumn = header.indexOf('type') - if (typeColumn !== -1) { - for (let i = 1; i < rows.length; i++) { - const line = rows[i] - let type = line[typeColumn] +const checkTypeCol = function(rows, file, issues) { + const header = rows[0] + const typeColumn = header.indexOf('type') + if (typeColumn !== -1) { + for (let i = 1; i < rows.length; i++) { + const line = rows[i] + let type = line[typeColumn] - if (type === 'n/a') { - continue - } - // check type casing - let isUpperCase = true; - if (type != type.toUpperCase()) { - // The character is lowercase - isUpperCase = false; - } - // only deal with upper casing when validating for errors - type = type.toUpperCase() + if (type === 'n/a') { + continue + } + // check type casing + let isUpperCase = true + if (type != type.toUpperCase()) { + // The character is lowercase + isUpperCase = false + } + // only deal with upper casing when validating for errors + type = type.toUpperCase() - // check if an error, or a warning is needed - if (!(allowedMEEGTypes.includes(type))) { - issues.push( - new Issue({ - file: file, - evidence: line, - line: i + 1, - reason: - 'the type column values should only consist of values specified for *_channels.tsv file', - code: 131, - }), - ) - } else if (!(isUpperCase)) { - // not upper case, then warn 
user to use upper-casing - issues.push( - new Issue({ - file: file, - evidence: line, - line: i + 1, - reason: - 'the type column values upper-cased', - code: 130, - }), - ) - } - } + // check if an error, or a warning is needed + if (!allowedMEEGTypes.includes(type)) { + issues.push( + new Issue({ + file: file, + evidence: line, + line: i + 1, + reason: + 'the type column values should only consist of values specified for *_channels.tsv file', + code: 131, + }), + ) + } else if (!isUpperCase) { + // not upper case, then warn user to use upper-casing + issues.push( + new Issue({ + file: file, + evidence: line, + line: i + 1, + reason: 'the type column values upper-cased', + code: 130, + }), + ) + } } - return + } + return } export default checkTypeCol diff --git a/bids-validator/validators/tsv/index.js b/bids-validator/validators/tsv/index.js index 09840d960..8135da0ea 100644 --- a/bids-validator/validators/tsv/index.js +++ b/bids-validator/validators/tsv/index.js @@ -15,5 +15,5 @@ export default { validate: validate, checkAge89: checkAge89, checkAcqTimeFormat: checkAcqTimeFormat, - validateContRec: validateContRec + validateContRec: validateContRec, } diff --git a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index 098d427cd..2d6c72459 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -1,4 +1,3 @@ -import utils from '../../utils' import Issue from '../../utils/issues/issue' import checkAcqTimeFormat from './checkAcqTimeFormat' import checkAge89 from './checkAge89' @@ -149,7 +148,9 @@ const TSV = (file, contents, fileList, callback) => { // create full dataset path list const pathList = [] for (let f in fileList) { - pathList.push(fileList[f].relativePath) + if (fileList.hasOwnProperty(f)) { + pathList.push(fileList[f].relativePath) + } } // check for stimuli file @@ -222,8 +223,9 @@ const TSV = (file, contents, fileList, callback) => { new Issue({ file: file, evidence: 
headersEvidence(headers), - reason: 'Participant_id column should be named ' + - 'as sub-.', + reason: + 'Participant_id column should be named ' + + 'as sub-.', line: l, code: 212, }), @@ -298,10 +300,7 @@ const TSV = (file, contents, fileList, callback) => { } // blood.tsv - if ( - file.relativePath.includes('/pet/') && - file.name.endsWith('_blood.tsv') - ) { + if (file.relativePath.includes('/pet/') && file.name.endsWith('_blood.tsv')) { // Validate fields here checkheader('time', 0, file, 126) } @@ -365,7 +364,7 @@ const TSV = (file, contents, fileList, callback) => { pathList.push(fDir) } else if (fPath.includes('_ieeg.mefd/')) { // MEF3 data - const fDir = fPath.substring(0, fPath.indexOf('_ieeg.mefd/') + 10); + const fDir = fPath.substring(0, fPath.indexOf('_ieeg.mefd/') + 10) if (!pathList.includes(fDir)) { pathList.push(fDir) } diff --git a/bids-validator/validators/tsv/validateContRecordings.js b/bids-validator/validators/tsv/validateContRecordings.js index 6ca9f741d..b6dd43d87 100644 --- a/bids-validator/validators/tsv/validateContRecordings.js +++ b/bids-validator/validators/tsv/validateContRecordings.js @@ -18,7 +18,7 @@ const validateContRec = function(contRecordings, jsonContentsDict) { contRecordings.map(contRecording => { // Get merged data dictionary for this file const potentialSidecars = utils.files.potentialLocations( - contRecording.relativePath.replace('.tsv.gz', '.json') + contRecording.relativePath.replace('.tsv.gz', '.json'), ) const mergedDictionary = utils.files.generateMergedSidecarDict( @@ -29,8 +29,8 @@ const validateContRec = function(contRecordings, jsonContentsDict) { issues.push( new Issue({ file: contRecording, - code: 133 - }) + code: 133, + }), ) } }) diff --git a/bids-validator/validators/tsv/validateTsvColumns.js b/bids-validator/validators/tsv/validateTsvColumns.js index e95568d80..4f248c170 100644 --- a/bids-validator/validators/tsv/validateTsvColumns.js +++ b/bids-validator/validators/tsv/validateTsvColumns.js @@ -27,11 
+27,12 @@ export const getTsvType = function(file) { return tsvType } -const getHeaders = tsvContents => tsvContents - .replace(/^\uefff/, '') - .split('\n')[0] - .trim() - .split('\t') +const getHeaders = tsvContents => + tsvContents + .replace(/^\uefff/, '') + .split('\n')[0] + .trim() + .split('\t') /** * @@ -51,7 +52,8 @@ const getCustomColumns = function(headers, type) { } return customCols } -const commaSeparatedStringOf = items => items.map(item => `"${item}"`).join(', ') +const commaSeparatedStringOf = items => + items.map(item => `"${item}"`).join(', ') /** * Loads relevant JSON schema for given tsv modalities. @@ -62,7 +64,7 @@ const commaSeparatedStringOf = items => items.map(item => `"${item}"`).join(', ' const loadSchemas = tsvs => { const schemas = {} const getSchemaByType = { - 'blood': () => require('../json/schemas/pet_blood.json') + blood: () => require('../json/schemas/pet_blood.json'), } const types = new Set(tsvs.map(tsv => getTsvType(tsv.file))) types.forEach(type => { @@ -84,10 +86,7 @@ const validateTsvColumns = function(tsvs, jsonContentsDict, headers) { tsvs.map(tsv => { const tsvType = getTsvType(tsv.file) - const customColumns = getCustomColumns( - getHeaders(tsv.contents), - tsvType, - ) + const customColumns = getCustomColumns(getHeaders(tsv.contents), tsvType) const isPetBlood = tsvType === 'blood' if (customColumns.length > 0 || isPetBlood) { // Get merged data dictionary for this file @@ -113,7 +112,11 @@ const validateTsvColumns = function(tsvs, jsonContentsDict, headers) { if (isPetBlood) { // Check PET tsv headers required by json sidecar - const petBloodHeaderIssues = validatePetBloodHeaders(tsv, mergedDict, schemas['blood']) + const petBloodHeaderIssues = validatePetBloodHeaders( + tsv, + mergedDict, + schemas['blood'], + ) tsvIssues.push(...petBloodHeaderIssues) } } @@ -140,28 +143,33 @@ export const validatePetBloodHeaders = (tsv, mergedDict, schema) => { // Collect required headers and the JSON sidecar properties that 
require them. const requiredHeaders = {} - Object.entries(schema.properties) - .forEach(([ property, subSchema ]) => { - if ( - subSchema.hasOwnProperty('requires_tsv_non_custom_columns') - && mergedDict[property] === true - ) { - subSchema.requires_tsv_non_custom_columns.forEach(header => { - if (header in requiredHeaders) { - requiredHeaders[header].push(property) - } else { - requiredHeaders[header] = [property] - } - }) - } - }) + Object.entries(schema.properties).forEach(([property, subSchema]) => { + if ( + subSchema.hasOwnProperty('requires_tsv_non_custom_columns') && + mergedDict[property] === true + ) { + subSchema.requires_tsv_non_custom_columns.forEach(header => { + if (header in requiredHeaders) { + requiredHeaders[header].push(property) + } else { + requiredHeaders[header] = [property] + } + }) + } + }) Object.entries(requiredHeaders).forEach(([requiredHeader, requiredBy]) => { if (!headers.includes(requiredHeader)) { - tsvIssues.push(new Issue({ - code: 211, - file: tsv.file, - evidence: `${tsv.file.name} has headers: ${commaSeparatedStringOf(headers)}; missing header "${requiredHeader}", which is required when any of the properties (${commaSeparatedStringOf(requiredBy)}) are true in the associated JSON sidecar.`, - })) + tsvIssues.push( + new Issue({ + code: 211, + file: tsv.file, + evidence: `${tsv.file.name} has headers: ${commaSeparatedStringOf( + headers, + )}; missing header "${requiredHeader}", which is required when any of the properties (${commaSeparatedStringOf( + requiredBy, + )}) are true in the associated JSON sidecar.`, + }), + ) } }) return tsvIssues @@ -187,39 +195,49 @@ const validateASL = (tsvs, jsonContentsDict, headers) => { // get the _asl_context.tsv associated with this asl scan const potentialAslContext = utils.files.potentialLocations( - file.relativePath.replace('.gz', '').replace('asl.nii', 'aslcontext.tsv'), + file.relativePath + .replace('.gz', '') + .replace('asl.nii', 'aslcontext.tsv'), + ) + const associatedAslContext = 
potentialAslContext.indexOf( + tsv.file.relativePath, ) - const associatedAslContext = potentialAslContext.indexOf(tsv.file.relativePath) - - if (associatedAslContext > -1) - { + if (associatedAslContext > -1) { const rows = tsv.contents - .replace(/[\r]+/g,'') - .split('\n') - .filter(row => !(!row || /^\s*$/.test(row))) + .replace(/[\r]+/g, '') + .split('\n') + .filter(row => !(!row || /^\s*$/.test(row))) - const m0scan_filters = ['m0scan']; - const filtered_m0scan_rows = rows.filter(row => m0scan_filters.includes(row)) + const m0scan_filters = ['m0scan'] + const filtered_m0scan_rows = rows.filter(row => + m0scan_filters.includes(row), + ) - const asl_filters = ['cbf','m0scan','label','control','deltam','volume_type']; + const asl_filters = [ + 'cbf', + 'm0scan', + 'label', + 'control', + 'deltam', + 'volume_type', + ] const filtered_tsv_rows = rows.filter(row => asl_filters.includes(row)) - if (rows.length != filtered_tsv_rows.length) - { + if (rows.length != filtered_tsv_rows.length) { tsvIssues.push( new Issue({ code: 176, file: file, - }) + }), ) } - if (rows.length -1 != numVols) { + if (rows.length - 1 != numVols) { tsvIssues.push( new Issue({ code: 165, file: file, - }) + }), ) } @@ -235,94 +253,92 @@ const validateASL = (tsvs, jsonContentsDict, headers) => { ) // check M0Type and tsv list for m0scan in case of an Included M0Type - if (mergedDict.hasOwnProperty('M0Type') && - mergedDict['M0Type'] === "Included" && - filtered_m0scan_rows.length < 1 - ) - { + if ( + mergedDict.hasOwnProperty('M0Type') && + mergedDict['M0Type'] === 'Included' && + filtered_m0scan_rows.length < 1 + ) { tsvIssues.push( new Issue({ file: file, code: 154, reason: - "''M0Type' is set to 'Included' however the tsv file does not contain any m0scan volume." 
+ "''M0Type' is set to 'Included' however the tsv file does not contain any m0scan volume.", }), ) } // check M0Type and tsv list for m0scan in case of an Absent M0Type - if (mergedDict.hasOwnProperty('M0Type') && - mergedDict['M0Type'] === "Absent" && - filtered_m0scan_rows.length >= 1 - ) - { + if ( + mergedDict.hasOwnProperty('M0Type') && + mergedDict['M0Type'] === 'Absent' && + filtered_m0scan_rows.length >= 1 + ) { tsvIssues.push( new Issue({ file: file, code: 199, reason: - "''M0Type' is set to 'Absent' however the tsv file contains an m0scan volume. This should be avoided." + "''M0Type' is set to 'Absent' however the tsv file contains an m0scan volume. This should be avoided.", }), ) } // check Flip Angle requirements with LookLocker acquisitions if ( - mergedDict.hasOwnProperty('FlipAngle') && - mergedDict['FlipAngle'].constructor === Array - ) - { + mergedDict.hasOwnProperty('FlipAngle') && + mergedDict['FlipAngle'].constructor === Array + ) { let FlipAngle = mergedDict['FlipAngle'] const FlipAngleLength = FlipAngle.length - if (FlipAngleLength !== rows.length -1) { + if (FlipAngleLength !== rows.length - 1) { tsvIssues.push( new Issue({ file: file, code: 172, reason: - "''FlipAngle' for this file does not match the TSV length. Please make sure that the size of the FlipAngle array in the json corresponds to the number of volume listed in the tsv file." + "''FlipAngle' for this file does not match the TSV length. 
Please make sure that the size of the FlipAngle array in the json corresponds to the number of volume listed in the tsv file.", }), ) } } // check Labelling Duration matching with TSV length only for PCASL or CASL - if - ( + if ( mergedDict.hasOwnProperty('LabelingDuration') && mergedDict['LabelingDuration'].constructor === Array && mergedDict.hasOwnProperty('ArterialSpinLabelingType') && - (mergedDict['ArterialSpinLabelingType'] == 'CASL' || mergedDict['ArterialSpinLabelingType'] == 'PCASL') - ) - { + (mergedDict['ArterialSpinLabelingType'] == 'CASL' || + mergedDict['ArterialSpinLabelingType'] == 'PCASL') + ) { let LabelingDuration = mergedDict['LabelingDuration'] const LabelingDurationLength = LabelingDuration.length - if (LabelingDurationLength !== rows.length -1) { + if (LabelingDurationLength !== rows.length - 1) { tsvIssues.push( new Issue({ file: file, code: 175, reason: - "''LabelingDuration' for this file does not match the TSV length. Please be sure that the size of the LabelingDuration array in the json corresponds to the number of volume listed in the tsv file." + "''LabelingDuration' for this file does not match the TSV length. 
Please be sure that the size of the LabelingDuration array in the json corresponds to the number of volume listed in the tsv file.", }), ) } } // check VolumeTiming with TSV length - if - ( + if ( mergedDict.hasOwnProperty('RepetitionTimePreparation') && mergedDict['RepetitionTimePreparation'].constructor === Array - ) - { - let RepetitionTimePreparation = mergedDict['RepetitionTimePreparation'] - const RepetitionTimePreparationLength = RepetitionTimePreparation.length - if (RepetitionTimePreparationLength !== rows.length -1) { + ) { + let RepetitionTimePreparation = + mergedDict['RepetitionTimePreparation'] + const RepetitionTimePreparationLength = + RepetitionTimePreparation.length + if (RepetitionTimePreparationLength !== rows.length - 1) { tsvIssues.push( new Issue({ file: file, code: 177, reason: - "''RepetitionTimePreparation' for this file do not match the TSV length. Please be sure that the size of the RepetitionTimePreparation array in the json corresponds to the number of volume listed in the tsv file." + "''RepetitionTimePreparation' for this file do not match the TSV length. Please be sure that the size of the RepetitionTimePreparation array in the json corresponds to the number of volume listed in the tsv file.", }), ) } @@ -330,52 +346,51 @@ const validateASL = (tsvs, jsonContentsDict, headers) => { // check Post Labelling Delays matching with TSV length if ( - mergedDict.hasOwnProperty('PostLabelingDelay') && - mergedDict['PostLabelingDelay'].constructor === Array - ) - { + mergedDict.hasOwnProperty('PostLabelingDelay') && + mergedDict['PostLabelingDelay'].constructor === Array + ) { let PostLabelingDelay = mergedDict['PostLabelingDelay'] const PostLabelingDelayLength = PostLabelingDelay.length - if (PostLabelingDelayLength !== rows.length -1) { + if (PostLabelingDelayLength !== rows.length - 1) { tsvIssues.push( new Issue({ file: file, code: 174, reason: - "''PostLabelingDelay' for this file do not match the TSV length. 
Please be sure that the size of the PostLabelingDelay array in the json corresponds to the number of volume listed in the tsv file." + "''PostLabelingDelay' for this file do not match the TSV length. Please be sure that the size of the PostLabelingDelay array in the json corresponds to the number of volume listed in the tsv file.", }), ) } } - if ( mergedDict.hasOwnProperty('TotalAcquiredVolumes') ) { + if (mergedDict.hasOwnProperty('TotalAcquiredVolumes')) { let TotalAcquiredVolumes = mergedDict['TotalAcquiredVolumes'] const TotalAcquiredVolumesLength = TotalAcquiredVolumes.length - if (TotalAcquiredVolumesLength !== rows.length -1) { + if (TotalAcquiredVolumesLength !== rows.length - 1) { tsvIssues.push( new Issue({ file: file, code: 181, reason: - "''TotalAcquiredVolumes' for this file do not match the TSV length. Please be sure that the size of the TotalAcquiredVolumes array in the json corresponds to the number of volume listed in the tsv file." + "''TotalAcquiredVolumes' for this file do not match the TSV length. Please be sure that the size of the TotalAcquiredVolumes array in the json corresponds to the number of volume listed in the tsv file.", }), ) } } if ( - mergedDict.hasOwnProperty('EchoTime') && - mergedDict['EchoTime'].constructor === Array - ) { + mergedDict.hasOwnProperty('EchoTime') && + mergedDict['EchoTime'].constructor === Array + ) { let EchoTime = mergedDict['EchoTime'] const EchoTimeLength = EchoTime.length - if (EchoTimeLength !== rows.length -1) { + if (EchoTimeLength !== rows.length - 1) { tsvIssues.push( new Issue({ file: file, code: 196, reason: - "''EchoTime' for this file do not match the TSV length. Please be sure that the size of the EchoTime array in the json corresponds to the number of volume listed in the tsv file." + "''EchoTime' for this file do not match the TSV length. 
Please be sure that the size of the EchoTime array in the json corresponds to the number of volume listed in the tsv file.", }), ) } From 0d67e6ae17ecea1bedc8d57bc252a3e434134793 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 16:43:44 -0600 Subject: [PATCH 019/101] move eslintrc to root of project --- bids-validator/.eslintrc.json => .eslintrc.json | 0 package.json | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename bids-validator/.eslintrc.json => .eslintrc.json (100%) diff --git a/bids-validator/.eslintrc.json b/.eslintrc.json similarity index 100% rename from bids-validator/.eslintrc.json rename to .eslintrc.json diff --git a/package.json b/package.json index 724a02f69..d51ddcc58 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "bids-validator-web" ], "scripts": { - "lint": "eslint -c bids-validator/.eslintrc.json bids-validator/", + "lint": "eslint bids-validator", "coverage": "jest --coverage", "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", From d1d6ad8241e5c515ffd2db23a083e98842c55fa4 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 16:49:49 -0600 Subject: [PATCH 020/101] set no auto clrf for git/actions to fix failing windows test --- .github/workflows/node_tests.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index 6b4e310a0..06a344de0 100644 --- a/.github/workflows/node_tests.yml +++ b/.github/workflows/node_tests.yml @@ -17,6 +17,10 @@ jobs: runs-on: ${{ matrix.platform }} steps: + - name: Set git to use LF + run: | + git config --global core.autocrlf false + git config --global core.eol - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: From 5ca5d73d377f0dcbc59bf73b96ed88931c70b4fa Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 16:52:26 -0600 Subject: [PATCH 021/101] add missing lf.. 
--- .github/workflows/node_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/node_tests.yml b/.github/workflows/node_tests.yml index 06a344de0..407d055f2 100644 --- a/.github/workflows/node_tests.yml +++ b/.github/workflows/node_tests.yml @@ -20,7 +20,7 @@ jobs: - name: Set git to use LF run: | git config --global core.autocrlf false - git config --global core.eol + git config --global core.eol lf - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: From e7bd0dff75487d94767c208bea60f1b127227ca9 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 28 Jul 2021 14:21:34 -0400 Subject: [PATCH 022/101] Added the microscopy Regex parts in the rules and added the function IsMicroscopy called by IsBids in type.js --- .../rules/file_level_rules.json | 13 +++++++++++ .../rules/phenotypic_rules.json | 2 +- .../bids_validator/rules/top_level_rules.json | 13 ++++++++++- bids-validator/utils/type.js | 22 ++++++++++++++----- 4 files changed, 43 insertions(+), 7 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index 54fb504f7..ad71f6256 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -523,5 +523,18 @@ "tokens": { "@@@_pet_ext_@@@": ["blood\\.tsv\\.gz", "blood\\.tsv", "blood\\.json"] } + }, + + "microscopy": { + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_ext_@@@)$", + "tokens": { + "@@@_microscopy_ext_@@@": [ + "_sem\\.ome\\.tif", + "_sem\\.ome\\.btf", + "_sem\\.tif", + "_sem\\.png", + "_sem\\.json" + ] + } } } diff --git a/bids-validator/bids_validator/rules/phenotypic_rules.json 
b/bids-validator/bids_validator/rules/phenotypic_rules.json index b981c01a3..34cb7afda 100644 --- a/bids-validator/bids_validator/rules/phenotypic_rules.json +++ b/bids-validator/bids_validator/rules/phenotypic_rules.json @@ -2,4 +2,4 @@ "phenotypic_data": { "regexp": "^[\\/\\\\](?:phenotype)[\\/\\\\](?:.*\\.tsv|.*\\.json)$" } -} +} \ No newline at end of file diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index 9757f6e53..63cbb33b3 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -15,7 +15,9 @@ "phase2.json", "fieldmap.json", "events.json", - "scans.json" + "scans.json", + "samples.json", + "samples.tsv" ] } }, @@ -133,5 +135,14 @@ "tokens": { "@@@_other_top_files_ext_@@@": ["physio\\.json", "stim\\.json"] } + }, + + "microscopy_top": { + "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", + "tokens": { + "@@@_microscopy_top_ext_@@@": [ + "_sem\\.json" + ] + } } } diff --git a/bids-validator/utils/type.js b/bids-validator/utils/type.js index d6199c999..eca3e8f17 100644 --- a/bids-validator/utils/type.js +++ b/bids-validator/utils/type.js @@ -59,6 +59,7 @@ const megCrosstalkData = buildRegExp(file_level_rules.meg_crosstalk) const stimuliData = buildRegExp(file_level_rules.stimuli) const petData = buildRegExp(file_level_rules.pet) const petBlood = buildRegExp(file_level_rules.pet_blood) +const microscopyData = buildRegExp(file_level_rules.microscopy) // Phenotypic data const phenotypicData = buildRegExp(phenotypic_rules.phenotypic_data) // Session level @@ -71,6 +72,7 @@ const ieegSes = buildRegExp(session_level_rules.ieeg_ses) const megSes = buildRegExp(session_level_rules.meg_ses) const scansSes = buildRegExp(session_level_rules.scans) const petSes = 
buildRegExp(session_level_rules.pet_ses) +const microscopySes = buildRegExp(session_level_rules.microscopy_ses) // Subject level const subjectLevel = buildRegExp(subject_level_rules.subject_level) // Top level @@ -85,6 +87,7 @@ const multiDirFieldmap = buildRegExp(top_level_rules.multi_dir_fieldmap) const otherTopFiles = buildRegExp(top_level_rules.other_top_files) const megTop = buildRegExp(top_level_rules.meg_top) const petTop = buildRegExp(top_level_rules.pet_top) +const microscopyTop = buildRegExp(top_level_rules.microscopy_top) export default { /** @@ -110,7 +113,8 @@ export default { this.file.isFieldMap(path) || this.file.isPhenotypic(path) || this.file.isPET(path) || - this.file.isPETBlood(path) + this.file.isPETBlood(path) || + this.file.isMicroscopy(path) ) }, @@ -134,7 +138,8 @@ export default { megTop.test(path) || eegTop.test(path) || ieegTop.test(path) || - petTop.test(path) + petTop.test(path) || + microscopyTop.test(path) ) } else { return ( @@ -148,7 +153,8 @@ export default { megTop.test(path) || eegTop.test(path) || ieegTop.test(path) || - petTop.test(path) + petTop.test(path) || + microscopyTop.test(path) ) } }, @@ -203,7 +209,8 @@ export default { conditionalMatch(megSes, path) || conditionalMatch(eegSes, path) || conditionalMatch(ieegSes, path) || - conditionalMatch(petSes, path) + conditionalMatch(petSes, path) || + conditionalMatch(microscopySes, path) ) }, @@ -339,6 +346,10 @@ export default { } }, + isMicroscopy: function(path) { + return conditionalMatch(microscopyData, path) + }, + isBehavioral: function(path) { if (bids_schema) { return bids_schema.datatypes['beh'].some(regex => regex.exec(path)) @@ -365,7 +376,8 @@ export default { this.isBehavioral(path) || this.isFuncBold(path) || this.isPET(path) || - this.isPETBlood(path) + this.isPETBlood(path) || + this.isMicroscopy(path) ) }, }, From a8d5eca3fbc0e82ba81332789550356afabc9d74 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 28 Jul 2021 15:15:13 -0400 Subject: [PATCH 
023/101] Added the different microscopy modality suffixes --- .../rules/file_level_rules.json | 31 +++++++++++++++---- .../rules/session_level_rules.json | 25 +++++++++++++++ .../bids_validator/rules/top_level_rules.json | 18 ++++++++++- 3 files changed, 67 insertions(+), 7 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index ad71f6256..3b0aa6720 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -526,14 +526,33 @@ }, "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_ext_@@@)$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@)$", "tokens": { + "@@@_microscopy_type_@@@": [ + "_tem", + "_sem", + "_ct", + "_bf", + "_df", + "_pc", + "_dic", + "_fluo", + "_conf", + "_pli", + "_cars", + "_2pe", + "_mpe", + "_sr", + "_nlo", + "_oct", + "_spim" + ], "@@@_microscopy_ext_@@@": [ - "_sem\\.ome\\.tif", - "_sem\\.ome\\.btf", - "_sem\\.tif", - "_sem\\.png", - "_sem\\.json" + ".ome\\.tif", + ".ome\\.btf", + ".tif", + ".png", + ".json" ] } } diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index 48fd6c724..f32b0afdf 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -201,5 +201,30 @@ "UNCInfant2V23" ] } + }, + + "microscopy_ses": { + 
"regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_microscopy_ses_type_@@@)$", + "tokens": { + "@@@_microscopy_ses_type_@@@": [ + "_tem.json", + "_sem.json", + "_ct.json", + "_bf.json", + "_df.json", + "_pc.json", + "_dic.json", + "_fluo.json", + "_conf.json", + "_pli.json", + "_cars.json", + "_2pe.json", + "_mpe.json", + "_sr.json", + "_nlo.json", + "_oct.json", + "_spim.json" + ] + } } } diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index 63cbb33b3..7dd2181c2 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -141,7 +141,23 @@ "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", "tokens": { "@@@_microscopy_top_ext_@@@": [ - "_sem\\.json" + "_tem\\.json", + "_sem\\.json", + "_ct\\.json", + "_bf\\.json", + "_df\\.json", + "_pc\\.json", + "_dic\\.json", + "_fluo\\.json", + "_conf\\.json", + "_pli\\.json", + "_cars\\.json", + "_2pe\\.json", + "_mpe\\.json", + "_sr\\.json", + "_nlo\\.json", + "_oct\\.json", + "_spim\\.json" ] } } From c3ff2fb25ed0338e362410c7dae9e2d328d0c329 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 28 Jul 2021 15:22:09 -0400 Subject: [PATCH 024/101] Add back the enter removed by accident in phenotypic_rules.json --- bids-validator/bids_validator/rules/phenotypic_rules.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/bids_validator/rules/phenotypic_rules.json b/bids-validator/bids_validator/rules/phenotypic_rules.json index 34cb7afda..b981c01a3 100644 --- a/bids-validator/bids_validator/rules/phenotypic_rules.json +++ 
b/bids-validator/bids_validator/rules/phenotypic_rules.json @@ -2,4 +2,4 @@ "phenotypic_data": { "regexp": "^[\\/\\\\](?:phenotype)[\\/\\\\](?:.*\\.tsv|.*\\.json)$" } -} \ No newline at end of file +} From c9ea85cbf7b41f19dc97f40beda72b347a303692 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 29 Jul 2021 15:25:17 -0400 Subject: [PATCH 025/101] Created the file with the required metadata fields microscopy.json and modified json.js to call it --- bids-validator/validators/json/json.js | 43 ++++++++++++ .../validators/json/schemas/microscopy.json | 70 +++++++++++++++++++ 2 files changed, 113 insertions(+) create mode 100644 bids-validator/validators/json/schemas/microscopy.json diff --git a/bids-validator/validators/json/json.js b/bids-validator/validators/json/json.js index d38493ecb..4eb7fa464 100644 --- a/bids-validator/validators/json/json.js +++ b/bids-validator/validators/json/json.js @@ -96,6 +96,26 @@ const selectSchema = file => { schema = require('./schemas/ieeg.json') } else if (file.name.endsWith('eeg.json')) { schema = require('./schemas/eeg.json') + } else if ( + file.name.endsWith('tem.json') || + file.name.endsWith('sem.json') || + file.name.endsWith('ct.json') || + file.name.endsWith('bf.json') || + file.name.endsWith('df.json') || + file.name.endsWith('pc.json') || + file.name.endsWith('dic.json') || + file.name.endsWith('fluo.json') || + file.name.endsWith('conf.json') || + file.name.endsWith('pli.json') || + file.name.endsWith('cars.json') || + file.name.endsWith('2pe.json') || + file.name.endsWith('mpe.json') || + file.name.endsWith('sr.json') || + file.name.endsWith('nlo.json') || + file.name.endsWith('oct.json') || + file.name.endsWith('spim.json') + ) { + schema = require('./schemas/microscopy.json') } else if ( file.relativePath.includes('/meg/') && file.name.endsWith('coordsystem.json') @@ -113,6 +133,29 @@ const selectSchema = file => { schema = require('./schemas/coordsystem_eeg.json') } else if ( 
file.relativePath.includes('/pet/') && + ( + file.name.endsWith('tem.json') || + file.name.endsWith('sem.json') || + file.name.endsWith('ct.json') || + file.name.endsWith('bf.json') || + file.name.endsWith('df.json') || + file.name.endsWith('pc.json') || + file.name.endsWith('dic.json') || + file.name.endsWith('fluo.json') || + file.name.endsWith('conf.json') || + file.name.endsWith('pli.json') || + file.name.endsWith('cars.json') || + file.name.endsWith('2pe.json') || + file.name.endsWith('mpe.json') || + file.name.endsWith('sr.json') || + file.name.endsWith('nlo.json') || + file.name.endsWith('oct.json') || + file.name.endsWith('spim.json') + ) + ) { + schema = require('./schemas/microscopy.json') + } else if ( + file.relativePath.includes('/microscopy/') && file.name.endsWith('blood.json') ) { schema = require('./schemas/pet_blood.json') diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json new file mode 100644 index 000000000..feafa04f1 --- /dev/null +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -0,0 +1,70 @@ +{ + "type": "object", + "properties": { + "Manufacturer": { "type": "string", "minLength": 1 }, + "ManufacturersModelName": { "type": "string", "minLength": 1 }, + "DeviceSerialNumber": {"type": "string", "minLength": 1}, + "StationName": { "type": "string", "minLength": 1 }, + "SoftwareVersions": { "type": "string", "minLength": 1 }, + "InstitutionName": { "type": "string", "minLength": 1 }, + "InstitutionAddress": { "type": "string", "minLength": 1 }, + "InstitutionalDepartmentName": { "type": "string", "minLength": 1 }, + "BodyPart": { "type": "string", "minLength": 1 }, + "BodyPartDetails": { "type": "string", "minLength": 1 }, + "BodyPartDetailsOntology": { "type": "string", "minLength": 1 }, + "Environment": { "type": "string", "minLength": 1 }, + "SampleEmbedding": { "type": "string", "minLength": 1 }, + "SampleFixation": { "type": "string", "minLength": 1 }, + 
"SampleStaining": { + "anyOf": [ + { "type": "string", "minLength": 1 }, + { "type": "array", "items": { "type": "string" } } + ] + }, + "SamplePrimaryAntibody": { + "anyOf": [ + { "type": "string", "minLength": 1 }, + { "type": "array", "items": { "type": "string" } } + ] + }, + "SampleSecondaryAntibody": { + "anyOf": [ + { "type": "string", "minLength": 1 }, + { "type": "array", "items": { "type": "string" } } + ] + }, + "SliceThickness": { "type": "number" }, + "SampleExtractionProtocol": { "type": "string", "minLength": 1 }, + "SampleExtractionInstitution": { "type": "string", "minLength": 1 }, + "ShrinkageFactor": { "type": "number" }, + "PixelSize": {"type": "array", "items": { "type": "number" } }, + "PixelSizeUnits": { "type": "string", "minLength": 1 }, + "ImageAcquisitionProtocol": { "type": "string", "minLength": 1 }, + "AcquisitionParameters": { "type": "string", "minLength": 1 } + }, + + "required": [ + "Manufacturer", + "ManufacturersModelName", + "DeviceSerialNumber", + "StationName", + "SoftwareVersions", + "InstitutionName", + "InstitutionAddress", + "InstitutionalDepartmentName", + + "BodyPart", + "Environment", + "SampleStaining", + "SamplePrimaryAntibody", + "SampleSecondaryAntibody", + + "PixelSize", + "PixelSizeUnits" + ], + + "dependencies": { + "PixelSize": ["PixelSizeUnits"] + } + +} From 23f3c1cc95a0f5b764d2e6f13e7b6d828ad9a5fe Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Fri, 30 Jul 2021 09:54:41 -0400 Subject: [PATCH 026/101] JSON field 'AcquisitionParameters' is now 'OtherAcquisitionParameters' --- bids-validator/validators/json/schemas/microscopy.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index feafa04f1..227f3e2af 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -40,7 +40,7 @@ "PixelSize": {"type": "array", 
"items": { "type": "number" } }, "PixelSizeUnits": { "type": "string", "minLength": 1 }, "ImageAcquisitionProtocol": { "type": "string", "minLength": 1 }, - "AcquisitionParameters": { "type": "string", "minLength": 1 } + "OtherAcquisitionParameters": { "type": "string", "minLength": 1 } }, "required": [ From 67fb84a9383bbb22a4d7d7b0052b80cae6ed5706 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Tue, 3 Aug 2021 17:33:57 -0400 Subject: [PATCH 027/101] Added error codes 214 and 215 for missing samples files. Added the file 'checkSamples.js' to check if the samples files are in the dataset. WIP; checkSamples considers for now that those files are always needed. Need to add the regex to find the sample entity. Modified 'fulLTest.js' to call 'checkSamples.js'. --- bids-validator/utils/issues/list.js | 12 ++++++++++++ bids-validator/validators/bids/checkSamples.js | 17 +++++++++++++++++ bids-validator/validators/bids/fullTest.js | 5 +++++ 3 files changed, 34 insertions(+) create mode 100644 bids-validator/validators/bids/checkSamples.js diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 8b395b12c..015833f34 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1042,4 +1042,16 @@ export default { reason: 'The recommended file /README is very small. Please consider expanding it with additional information about the dataset.', }, + 214: { + key: 'SAMPLES_JSON_MISSING', + severity: 'error', + reason: + 'The compulsory file /samples.json is missing. See Section 03 (Modality agnostic files) of the BIDS specification.', + }, + 215: { + key: 'SAMPLES_TSV_MISSING', + severity: 'error', + reason: + 'The compulsory file /samples.tsv is missing. 
See Section 03 (Modality agnostic files) of the BIDS specification.', + }, } diff --git a/bids-validator/validators/bids/checkSamples.js b/bids-validator/validators/bids/checkSamples.js new file mode 100644 index 000000000..7ffcde4cc --- /dev/null +++ b/bids-validator/validators/bids/checkSamples.js @@ -0,0 +1,17 @@ +const Issue = require('../../utils').issues.Issue + +const checkSamples = jsonContentsDict => { + let issues = [] + const jsonFilePaths = Object.keys(jsonContentsDict) + const hasSamples = jsonFilePaths.some(path => { + return path == '/samples.json' + }) + + + if (!hasSamples) { + issues.push(new Issue({ code: 214 })) + } else { } + return issues +} + +export default checkSamples diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index a9c4d240f..41bcff9e1 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -14,6 +14,7 @@ import subSesMismatchTest from './subSesMismatchTest' import groupFileTypes from './groupFileTypes' import subjects from './subjects' import checkDatasetDescription from './checkDatasetDescription' +import checkSamples from './checkSamples' import checkReadme from './checkReadme' import validateMisc from '../../utils/files/validateMisc' import collectSubjectMetadata from '../../utils/summary/collectSubjectMetadata' @@ -136,6 +137,10 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const datasetDescriptionIssues = checkDatasetDescription(jsonContentsDict) self.issues = self.issues.concat(datasetDescriptionIssues) + // Check for samples file in the proper place + const samplesIssues = checkSamples(jsonContentsDict) + self.issues = self.issues.concat(samplesIssues) + // Check for README file in the proper place const readmeIssues = checkReadme(fileList) self.issues = self.issues.concat(readmeIssues) From 5f1e2eed71801fa56a1b3ef83fa20c5c7eacabeb Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 4 
Aug 2021 15:11:18 -0400 Subject: [PATCH 028/101] Corrected a typo between the 'pet' and 'microscopy' paths. --- bids-validator/validators/json/json.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/bids-validator/validators/json/json.js b/bids-validator/validators/json/json.js index 4eb7fa464..d18a1964d 100644 --- a/bids-validator/validators/json/json.js +++ b/bids-validator/validators/json/json.js @@ -116,6 +116,7 @@ const selectSchema = file => { file.name.endsWith('spim.json') ) { schema = require('./schemas/microscopy.json') + } else if ( file.relativePath.includes('/meg/') && file.name.endsWith('coordsystem.json') @@ -132,7 +133,7 @@ const selectSchema = file => { ) { schema = require('./schemas/coordsystem_eeg.json') } else if ( - file.relativePath.includes('/pet/') && + file.relativePath.includes('/microscopy/') && ( file.name.endsWith('tem.json') || file.name.endsWith('sem.json') || @@ -154,8 +155,9 @@ const selectSchema = file => { ) ) { schema = require('./schemas/microscopy.json') + } else if ( - file.relativePath.includes('/microscopy/') && + file.relativePath.includes('/pet/') && file.name.endsWith('blood.json') ) { schema = require('./schemas/pet_blood.json') From 2132b88434dbb107106c70d10882b410faf40dca Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 4 Aug 2021 15:15:46 -0400 Subject: [PATCH 029/101] Codes '216' and '217' added for 'PixelSizeUnits' and a missing recommended JSON field respectively. --- bids-validator/utils/issues/list.js | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 015833f34..3cc799496 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1054,4 +1054,16 @@ export default { reason: 'The compulsory file /samples.tsv is missing. 
See Section 03 (Modality agnostic files) of the BIDS specification.', }, + 216: { + key: 'PIXEL_SIZE_UNITS', + severity: 'error', + reason: + "PixelSizeUnits was not defined in microseconds (um) in the image's metadata.", + }, +217: { + key: 'JSON_FIELD_MISSING', + severity: 'warning', + reason: + 'One or more recommended fields are missing in the JSON file. See Section Microscopy of the BIDS specification.', + } } From a82a847777894dd09e9d71d95f83e3a52d06fa6e Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 4 Aug 2021 16:36:22 -0400 Subject: [PATCH 030/101] Removed code 214 since 'samples.json' is only recommended if samples.tsv is there (already coded). --- bids-validator/utils/issues/list.js | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 3cc799496..99711ddc5 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1043,24 +1043,18 @@ export default { 'The recommended file /README is very small. Please consider expanding it with additional information about the dataset.', }, 214: { - key: 'SAMPLES_JSON_MISSING', - severity: 'error', - reason: - 'The compulsory file /samples.json is missing. See Section 03 (Modality agnostic files) of the BIDS specification.', - }, - 215: { key: 'SAMPLES_TSV_MISSING', severity: 'error', reason: 'The compulsory file /samples.tsv is missing. See Section 03 (Modality agnostic files) of the BIDS specification.', }, - 216: { + 215: { key: 'PIXEL_SIZE_UNITS', severity: 'error', reason: "PixelSizeUnits was not defined in microseconds (um) in the image's metadata.", }, -217: { +216: { key: 'JSON_FIELD_MISSING', severity: 'warning', reason: From 28c9d447755e393598e0cefd5ac189672d6107b7 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 4 Aug 2021 16:46:36 -0400 Subject: [PATCH 031/101] Modified 'checkSamples.js' to check 'samples.tsv' instead of JSON. 
Might not need this file, WIP. In 'microscopy.json', removed the recommended fields from the 'required' dict. --- bids-validator/validators/json/schemas/microscopy.json | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index 227f3e2af..754bdc594 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -44,6 +44,11 @@ }, "required": [ + "PixelSize", + "PixelSizeUnits" + ], + + "recommended": [ "Manufacturer", "ManufacturersModelName", "DeviceSerialNumber", @@ -57,10 +62,7 @@ "Environment", "SampleStaining", "SamplePrimaryAntibody", - "SampleSecondaryAntibody", - - "PixelSize", - "PixelSizeUnits" + "SampleSecondaryAntibody" ], "dependencies": { From 8fdd70bc9caec1ff55321a636209ce2406f86b7f Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 5 Aug 2021 10:45:58 -0400 Subject: [PATCH 032/101] Modified again to check 'samples.tsv'. 
--- .../validators/bids/checkSamples.js | 55 +++++++++++++++++-- 1 file changed, 49 insertions(+), 6 deletions(-) diff --git a/bids-validator/validators/bids/checkSamples.js b/bids-validator/validators/bids/checkSamples.js index 7ffcde4cc..e22d2ce35 100644 --- a/bids-validator/validators/bids/checkSamples.js +++ b/bids-validator/validators/bids/checkSamples.js @@ -1,12 +1,23 @@ +//import buildRegExp from '../../utils/type' +//import conditionalMatch from '../../utils/type' + const Issue = require('../../utils').issues.Issue -const checkSamples = jsonContentsDict => { +const regExpSamples = buildRegExp({ + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@)$", + "tokens": { + "@@@_microscopy_type_@@@": ["_tem","_sem","_ct","_bf","_df","_pc","_dic","_fluo","_conf","_pli","_cars","_2pe","_mpe","_sr","_nlo","_oct","_spim"], + "@@@_microscopy_ext_@@@": [".ome\\.tif",".ome\\.btf",".tif",".png",".json"] + } +}) + + +const checkSamples = tsvContentsDict => { let issues = [] - const jsonFilePaths = Object.keys(jsonContentsDict) - const hasSamples = jsonFilePaths.some(path => { - return path == '/samples.json' - }) - + const tsvFilePaths = Object.keys(tsvContentsDict) + + const hasSamples = tsvFilePaths.some(path => { + return path == '/samples.tsv'}) if (!hasSamples) { issues.push(new Issue({ code: 214 })) @@ -15,3 +26,35 @@ const checkSamples = jsonContentsDict => { } export default checkSamples + + +// from '../../utils/type.js', the two following functions are used: + +function conditionalMatch(expression, path) { + var match = expression.exec(path) + + // we need to do this because JS does not support conditional groups + if (match) { + if ((match[2] && match[3]) || !match[2]) { + return true + } + } + return false +} + +/** + 
* Insert tokens into RegExps from bids-validator-common + */ +function buildRegExp(obj) { + if (obj.tokens) { + let regExp = obj.regexp + const keys = Object.keys(obj.tokens) + for (let key of keys) { + const args = obj.tokens[key].join('|') + regExp = regExp.replace(key, args) + } + return new RegExp(regExp) + } else { + return new RegExp(obj.regexp) + } +} From 9db34662f98d9b8d0167fa5b5e71ef191da12ba8 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 5 Aug 2021 11:23:05 -0400 Subject: [PATCH 033/101] The checkSamples method was working with 'samples.json', but not with 'samples.tsv'. checkSamples deleted and its call in fullTest.js as well. --- .../validators/bids/checkSamples.js | 60 ------------------- bids-validator/validators/bids/fullTest.js | 11 ++-- 2 files changed, 6 insertions(+), 65 deletions(-) delete mode 100644 bids-validator/validators/bids/checkSamples.js diff --git a/bids-validator/validators/bids/checkSamples.js b/bids-validator/validators/bids/checkSamples.js deleted file mode 100644 index e22d2ce35..000000000 --- a/bids-validator/validators/bids/checkSamples.js +++ /dev/null @@ -1,60 +0,0 @@ -//import buildRegExp from '../../utils/type' -//import conditionalMatch from '../../utils/type' - -const Issue = require('../../utils').issues.Issue - -const regExpSamples = buildRegExp({ - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@)$", - "tokens": { - "@@@_microscopy_type_@@@": ["_tem","_sem","_ct","_bf","_df","_pc","_dic","_fluo","_conf","_pli","_cars","_2pe","_mpe","_sr","_nlo","_oct","_spim"], - "@@@_microscopy_ext_@@@": [".ome\\.tif",".ome\\.btf",".tif",".png",".json"] - } -}) - - -const checkSamples = tsvContentsDict => { - let issues = [] - const tsvFilePaths = Object.keys(tsvContentsDict) 
- - const hasSamples = tsvFilePaths.some(path => { - return path == '/samples.tsv'}) - - if (!hasSamples) { - issues.push(new Issue({ code: 214 })) - } else { } - return issues -} - -export default checkSamples - - -// from '../../utils/type.js', the two following functions are used: - -function conditionalMatch(expression, path) { - var match = expression.exec(path) - - // we need to do this because JS does not support conditional groups - if (match) { - if ((match[2] && match[3]) || !match[2]) { - return true - } - } - return false -} - -/** - * Insert tokens into RegExps from bids-validator-common - */ -function buildRegExp(obj) { - if (obj.tokens) { - let regExp = obj.regexp - const keys = Object.keys(obj.tokens) - for (let key of keys) { - const args = obj.tokens[key].join('|') - regExp = regExp.replace(key, args) - } - return new RegExp(regExp) - } else { - return new RegExp(obj.regexp) - } -} diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 41bcff9e1..d1cfb27a2 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -14,7 +14,6 @@ import subSesMismatchTest from './subSesMismatchTest' import groupFileTypes from './groupFileTypes' import subjects from './subjects' import checkDatasetDescription from './checkDatasetDescription' -import checkSamples from './checkSamples' import checkReadme from './checkReadme' import validateMisc from '../../utils/files/validateMisc' import collectSubjectMetadata from '../../utils/summary/collectSubjectMetadata' @@ -43,6 +42,7 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const phenotypeParticipants = [] const tsvs = [] + const samples = [] const summary = utils.collectSummary(fileList, self.options, schema) @@ -90,6 +90,10 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { ) } + // Does + // Does 'files.tsv' has a file with in it? 
--Yes--> needSample + + validateMisc(files.misc) .then(miscIssues => { self.issues = self.issues.concat(miscIssues) @@ -103,6 +107,7 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { participants, phenotypeParticipants, stimuli, + samples ) }) .then(({ tsvIssues, participantsTsvContent }) => { @@ -137,10 +142,6 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const datasetDescriptionIssues = checkDatasetDescription(jsonContentsDict) self.issues = self.issues.concat(datasetDescriptionIssues) - // Check for samples file in the proper place - const samplesIssues = checkSamples(jsonContentsDict) - self.issues = self.issues.concat(samplesIssues) - // Check for README file in the proper place const readmeIssues = checkReadme(fileList) self.issues = self.issues.concat(readmeIssues) From cb2c396b9cb0436b4f9cbad3fbf36fe85906243d Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 5 Aug 2021 14:58:01 -0400 Subject: [PATCH 034/101] Didn't succeed to figure out how to detect samples.tsv. I restored the files 'fullTest.js', 'tsv.js' and 'validate.js'. I will work on something else and come back later on it. --- bids-validator/validators/bids/fullTest.js | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index d1cfb27a2..ae7498610 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -42,7 +42,6 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const phenotypeParticipants = [] const tsvs = [] - const samples = [] const summary = utils.collectSummary(fileList, self.options, schema) @@ -90,10 +89,6 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { ) } - // Does - // Does 'files.tsv' has a file with in it? 
--Yes--> needSample - - validateMisc(files.misc) .then(miscIssues => { self.issues = self.issues.concat(miscIssues) @@ -106,10 +101,10 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { events, participants, phenotypeParticipants, - stimuli, - samples + stimuli ) }) + .then(({ tsvIssues, participantsTsvContent }) => { self.issues = self.issues.concat(tsvIssues) @@ -119,6 +114,7 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { // Bvec validation return bvec.validate(files.bvec, bContentsDict) }) + .then(bvecIssues => { self.issues = self.issues.concat(bvecIssues) From ac1922c310395b38b583febf0b281ac56936f7a5 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 5 Aug 2021 15:03:14 -0400 Subject: [PATCH 035/101] Removed enters in fullTest.js. --- bids-validator/validators/bids/fullTest.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index ae7498610..a9c4d240f 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -101,10 +101,9 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { events, participants, phenotypeParticipants, - stimuli + stimuli, ) }) - .then(({ tsvIssues, participantsTsvContent }) => { self.issues = self.issues.concat(tsvIssues) @@ -114,7 +113,6 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { // Bvec validation return bvec.validate(files.bvec, bContentsDict) }) - .then(bvecIssues => { self.issues = self.issues.concat(bvecIssues) From 770f8c93c5129d73b4f97fd3b44777592bf718d3 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 5 Aug 2021 16:09:38 -0400 Subject: [PATCH 036/101] Removed the warning for RECOMMENDED JSON fields missing. Doesn't seem implemented yet for the other modalities. 
--- bids-validator/utils/issues/list.js | 6 ------ bids-validator/validators/json/json.js | 27 +------------------------- 2 files changed, 1 insertion(+), 32 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 99711ddc5..3cbbc6583 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1053,11 +1053,5 @@ export default { severity: 'error', reason: "PixelSizeUnits was not defined in microseconds (um) in the image's metadata.", - }, -216: { - key: 'JSON_FIELD_MISSING', - severity: 'warning', - reason: - 'One or more recommended fields are missing in the JSON file. See Section Microscopy of the BIDS specification.', } } diff --git a/bids-validator/validators/json/json.js b/bids-validator/validators/json/json.js index d18a1964d..78314454b 100644 --- a/bids-validator/validators/json/json.js +++ b/bids-validator/validators/json/json.js @@ -116,7 +116,6 @@ const selectSchema = file => { file.name.endsWith('spim.json') ) { schema = require('./schemas/microscopy.json') - } else if ( file.relativePath.includes('/meg/') && file.name.endsWith('coordsystem.json') @@ -131,31 +130,7 @@ const selectSchema = file => { file.relativePath.includes('/eeg/') && file.name.endsWith('coordsystem.json') ) { - schema = require('./schemas/coordsystem_eeg.json') - } else if ( - file.relativePath.includes('/microscopy/') && - ( - file.name.endsWith('tem.json') || - file.name.endsWith('sem.json') || - file.name.endsWith('ct.json') || - file.name.endsWith('bf.json') || - file.name.endsWith('df.json') || - file.name.endsWith('pc.json') || - file.name.endsWith('dic.json') || - file.name.endsWith('fluo.json') || - file.name.endsWith('conf.json') || - file.name.endsWith('pli.json') || - file.name.endsWith('cars.json') || - file.name.endsWith('2pe.json') || - file.name.endsWith('mpe.json') || - file.name.endsWith('sr.json') || - file.name.endsWith('nlo.json') || - file.name.endsWith('oct.json') || - 
file.name.endsWith('spim.json') - ) - ) { - schema = require('./schemas/microscopy.json') - + schema = require('./schemas/coordsystem_eeg.json') } else if ( file.relativePath.includes('/pet/') && file.name.endsWith('blood.json') From a6be30d52c83cf964d5d70a141321fff2e36b0b0 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron <79805277+etiennebergeron@users.noreply.github.com> Date: Fri, 6 Aug 2021 10:32:40 -0400 Subject: [PATCH 037/101] Update bids-validator/utils/issues/list.js Co-authored-by: mariehbourget <54086142+mariehbourget@users.noreply.github.com> --- bids-validator/utils/issues/list.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 3cbbc6583..29466822d 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1052,6 +1052,6 @@ export default { key: 'PIXEL_SIZE_UNITS', severity: 'error', reason: - "PixelSizeUnits was not defined in microseconds (um) in the image's metadata.", + "PixelSizeUnits was not defined in micrometers (um) in the image's metadata.", } } From 73f76caa88c19a5d8363f61ad2045c2cdb13103b Mon Sep 17 00:00:00 2001 From: Etienne Bergeron <79805277+etiennebergeron@users.noreply.github.com> Date: Fri, 6 Aug 2021 10:48:11 -0400 Subject: [PATCH 038/101] Update bids-validator/validators/json/json.js Co-authored-by: mariehbourget <54086142+mariehbourget@users.noreply.github.com> --- bids-validator/validators/json/json.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/validators/json/json.js b/bids-validator/validators/json/json.js index 78314454b..3636634b3 100644 --- a/bids-validator/validators/json/json.js +++ b/bids-validator/validators/json/json.js @@ -130,7 +130,7 @@ const selectSchema = file => { file.relativePath.includes('/eeg/') && file.name.endsWith('coordsystem.json') ) { - schema = require('./schemas/coordsystem_eeg.json') + schema = 
require('./schemas/coordsystem_eeg.json') } else if ( file.relativePath.includes('/pet/') && file.name.endsWith('blood.json') From 51fa4576fa8a5695ed1b7396fd7ef6802239488a Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Fri, 6 Aug 2021 10:54:46 -0400 Subject: [PATCH 039/101] Changed the suffixes to uppercase in the rules and 'json.js'. --- .../rules/file_level_rules.json | 34 +++++++++---------- .../rules/session_level_rules.json | 34 +++++++++---------- .../bids_validator/rules/top_level_rules.json | 34 +++++++++---------- bids-validator/validators/json/json.js | 34 +++++++++---------- 4 files changed, 68 insertions(+), 68 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index 3b0aa6720..efb09dc3e 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -529,23 +529,23 @@ "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@)$", "tokens": { "@@@_microscopy_type_@@@": [ - "_tem", - "_sem", - "_ct", - "_bf", - "_df", - "_pc", - "_dic", - "_fluo", - "_conf", - "_pli", - "_cars", - "_2pe", - "_mpe", - "_sr", - "_nlo", - "_oct", - "_spim" + "_TEM", + "_SEM", + "_CT", + "_BF", + "_DF", + "_PC", + "_DIC", + "_FLUO", + "_CONF", + "_PLI", + "_CARS", + "_2PE", + "_MPE", + "_SR", + "_NLO", + "_OCT", + "_SPIM" ], "@@@_microscopy_ext_@@@": [ ".ome\\.tif", diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index f32b0afdf..e956bf608 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json 
@@ -207,23 +207,23 @@ "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_microscopy_ses_type_@@@)$", "tokens": { "@@@_microscopy_ses_type_@@@": [ - "_tem.json", - "_sem.json", - "_ct.json", - "_bf.json", - "_df.json", - "_pc.json", - "_dic.json", - "_fluo.json", - "_conf.json", - "_pli.json", - "_cars.json", - "_2pe.json", - "_mpe.json", - "_sr.json", - "_nlo.json", - "_oct.json", - "_spim.json" + "_TEM.json", + "_SEM.json", + "_CT.json", + "_BF.json", + "_DF.json", + "_PC.json", + "_DIC.json", + "_FLUO.json", + "_CONF.json", + "_PLI.json", + "_CARS.json", + "_2PE.json", + "_MPE.json", + "_SR.json", + "_NLO.json", + "_OCT.json", + "_SPIM.json" ] } } diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index 7dd2181c2..4cc98a9e4 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -141,23 +141,23 @@ "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", "tokens": { "@@@_microscopy_top_ext_@@@": [ - "_tem\\.json", - "_sem\\.json", - "_ct\\.json", - "_bf\\.json", - "_df\\.json", - "_pc\\.json", - "_dic\\.json", - "_fluo\\.json", - "_conf\\.json", - "_pli\\.json", - "_cars\\.json", - "_2pe\\.json", - "_mpe\\.json", - "_sr\\.json", - "_nlo\\.json", - "_oct\\.json", - "_spim\\.json" + "_TEM\\.json", + "_SEM\\.json", + "_CT\\.json", + "_BF\\.json", + "_DF\\.json", + "_PC\\.json", + "_DIC\\.json", + "_FLUO\\.json", + "_CONF\\.json", + "_PLI\\.json", + "_CARS\\.json", + "_2PE\\.json", + "_MPE\\.json", + "_SR\\.json", + "_NLO\\.json", + "_OCT\\.json", + "_SPIM\\.json" ] } } diff --git a/bids-validator/validators/json/json.js 
b/bids-validator/validators/json/json.js index 3636634b3..cbd696475 100644 --- a/bids-validator/validators/json/json.js +++ b/bids-validator/validators/json/json.js @@ -97,23 +97,23 @@ const selectSchema = file => { } else if (file.name.endsWith('eeg.json')) { schema = require('./schemas/eeg.json') } else if ( - file.name.endsWith('tem.json') || - file.name.endsWith('sem.json') || - file.name.endsWith('ct.json') || - file.name.endsWith('bf.json') || - file.name.endsWith('df.json') || - file.name.endsWith('pc.json') || - file.name.endsWith('dic.json') || - file.name.endsWith('fluo.json') || - file.name.endsWith('conf.json') || - file.name.endsWith('pli.json') || - file.name.endsWith('cars.json') || - file.name.endsWith('2pe.json') || - file.name.endsWith('mpe.json') || - file.name.endsWith('sr.json') || - file.name.endsWith('nlo.json') || - file.name.endsWith('oct.json') || - file.name.endsWith('spim.json') + file.name.endsWith('TEM.json') || + file.name.endsWith('SEM.json') || + file.name.endsWith('CT.json') || + file.name.endsWith('BF.json') || + file.name.endsWith('DF.json') || + file.name.endsWith('PC.json') || + file.name.endsWith('DIC.json') || + file.name.endsWith('FLUO.json') || + file.name.endsWith('CONF.json') || + file.name.endsWith('PLI.json') || + file.name.endsWith('CARS.json') || + file.name.endsWith('2PE.json') || + file.name.endsWith('MPE.json') || + file.name.endsWith('SR.json') || + file.name.endsWith('NLO.json') || + file.name.endsWith('OCT.json') || + file.name.endsWith('SPIM.json') ) { schema = require('./schemas/microscopy.json') } else if ( From 78d2d59869d28edc5b899fb05d5f81ce58e8539e Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Fri, 6 Aug 2021 14:31:56 -0400 Subject: [PATCH 040/101] The entity 'sample' is REQUIRED in microscopy, so modified the regex rules to suit that. 
--- bids-validator/bids_validator/rules/file_level_rules.json | 2 +- bids-validator/bids_validator/rules/session_level_rules.json | 2 +- bids-validator/bids_validator/rules/top_level_rules.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index efb09dc3e..8abc2d68e 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -526,7 +526,7 @@ }, "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@)$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@)$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index e956bf608..7f4cf040c 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -204,7 +204,7 @@ }, "microscopy_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_microscopy_ses_type_@@@)$", + "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_microscopy_ses_type_@@@)$", "tokens": { "@@@_microscopy_ses_type_@@@": [ "_TEM.json", diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index 4cc98a9e4..d6aa08818 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -138,7 +138,7 @@ }, "microscopy_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", + "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", "tokens": { "@@@_microscopy_top_ext_@@@": [ "_TEM\\.json", From 9dcafaa9887ac74291b52488b0ff25bf56967ff5 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Mon, 9 Aug 2021 13:58:20 -0400 Subject: [PATCH 041/101] Added a new 'checkSamples.js' file, but this time based on 'checkReadme.js'. The file is called in the fullTest only if the dataset has for modality 'Microscopy'. 
--- .../utils/summary/collectModalities.js | 4 ++++ bids-validator/validators/bids/checkSamples.js | 16 ++++++++++++++++ bids-validator/validators/bids/fullTest.js | 7 +++++++ 3 files changed, 27 insertions(+) create mode 100644 bids-validator/validators/bids/checkSamples.js diff --git a/bids-validator/utils/summary/collectModalities.js b/bids-validator/utils/summary/collectModalities.js index 4d0b3a903..7d95cee38 100644 --- a/bids-validator/utils/summary/collectModalities.js +++ b/bids-validator/utils/summary/collectModalities.js @@ -7,6 +7,7 @@ export const collectModalities = filenames => { MEG: 0, EEG: 0, iEEG: 0, + Microscopy: 0, } const secondary = { MRI_Diffusion: 0, @@ -56,6 +57,9 @@ export const collectModalities = filenames => { if (type.file.isIEEG(path)) { modalities.iEEG++ } + if (type.file.isMicroscopy(path)) { + modalities.Microscopy++ + } } // Order by matching file count const nonZero = Object.keys(modalities).filter(a => modalities[a] !== 0) diff --git a/bids-validator/validators/bids/checkSamples.js b/bids-validator/validators/bids/checkSamples.js new file mode 100644 index 000000000..c535a8053 --- /dev/null +++ b/bids-validator/validators/bids/checkSamples.js @@ -0,0 +1,16 @@ +import isNode from '../../utils/isNode' + +const Issue = require('../../utils').issues.Issue + +const checkSamples = fileList => { + const issues = [] + const fileKeys = Object.keys(fileList) + const samplesFile = Array.from(Object.values(fileList)).find( + file => file.relativePath && file.relativePath == '/samples.tsv', + ) + if (!samplesFile) { + issues.push(new Issue({ code: 214 })) + } + return issues +} +export default checkSamples diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index a9c4d240f..1351772bf 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -15,6 +15,8 @@ import groupFileTypes from './groupFileTypes' import subjects from './subjects' import 
checkDatasetDescription from './checkDatasetDescription' import checkReadme from './checkReadme' +import checkSamples from './checkSamples' +import type from '../../utils/type' import validateMisc from '../../utils/files/validateMisc' import collectSubjectMetadata from '../../utils/summary/collectSubjectMetadata' import collectPetFields from '../../utils/summary/collectPetFields' @@ -140,6 +142,11 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const readmeIssues = checkReadme(fileList) self.issues = self.issues.concat(readmeIssues) + // Check for samples file in the proper place (only for the microscopy modality) + if (summary.modalities.includes('Microscopy')) { + const samplesIssues = checkSamples(fileList) + self.issues = self.issues.concat(samplesIssues) + } // Validate json files and contents return json.validate(jsonFiles, fileList, jsonContentsDict, summary) }) From 9e13c3f34d15c0b72c2901425a74a0ad8f4b6567 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Mon, 9 Aug 2021 14:00:36 -0400 Subject: [PATCH 042/101] Removed unnecessary import. 
--- bids-validator/validators/bids/fullTest.js | 1 - 1 file changed, 1 deletion(-) diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 1351772bf..138e4ed4b 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -16,7 +16,6 @@ import subjects from './subjects' import checkDatasetDescription from './checkDatasetDescription' import checkReadme from './checkReadme' import checkSamples from './checkSamples' -import type from '../../utils/type' import validateMisc from '../../utils/files/validateMisc' import collectSubjectMetadata from '../../utils/summary/collectSubjectMetadata' import collectPetFields from '../../utils/summary/collectPetFields' From c10ab225635c397539c036019ee0e4fca39441d7 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 11 Aug 2021 15:03:43 -0400 Subject: [PATCH 043/101] Added the rules related to the 'samples.tsv' file in 'tsv.js'. Added the related errors in the 'list.js' file. --- bids-validator/utils/issues/list.js | 36 +++++++ bids-validator/validators/tsv/tsv.js | 147 ++++++++++++++++++++++++++- 2 files changed, 182 insertions(+), 1 deletion(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 29466822d..274eb0d5c 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1049,6 +1049,42 @@ export default { 'The compulsory file /samples.tsv is missing. 
See Section 03 (Modality agnostic files) of the BIDS specification.', }, 215: { + key: 'SAMPLE_ID_PATTERN', + severity: 'error', + reason: + 'sample_id column labels must consist of the pattern "sample-".', + }, + 216: { + key: 'SAMPLE_ID_COLUMN', + severity: 'error', + reason: + "Samples .tsv files must have a 'sample_id' column.", + }, + 217: { + key: 'PARTICIPANT_ID_COLUMN', + severity: 'error', + reason: + "Samples .tsv files must have a 'participant_id' column.", + }, + 218: { + key: 'SAMPLE_TYPE_COLUMN', + severity: 'error', + reason: + "Samples .tsv files must have a 'sample_type' column.", + }, + 219: { + key: 'SAMPLE_TYPE_VALUE', + severity: 'error', + reason: + "sample_type MUST consist of one of the following values: cell line, in vitro differentiated cells, primary cell, cell-free sample, cloning host, tissue, whole organisms, organoid or technical sample.", + }, + 220: { + key: 'SAMPLE_ID_DUPLICATE', + severity: 'error', + reason: + "Each sample MUST be described by one and only one row.", + }, + 221: { key: 'PIXEL_SIZE_UNITS', severity: 'error', reason: diff --git a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index 2d6c72459..d942e7e47 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -242,6 +242,152 @@ const TSV = (file, contents, fileList, callback) => { } } + // samples.tsv + let samples = null + if (file.name === 'samples.tsv') { + const sampleIdColumnValues = [] + const sampleIdColumn = headers.indexOf('sample_id') + const participantIdColumn = headers.indexOf('participant_id') + const sampleTypeColumn = headers.indexOf('sample_type') + + // if the sample_id column is missing, an error + // will be raised + if (sampleIdColumn === -1) { + issues.push( + new Issue({ + file: file, + evidence: headersEvidence(headers), + line: 1, + code: 216, + }), + ) + } + // if the participant_id column is missing, an error + // will be raised + else if (participantIdColumn === -1) { + 
issues.push( + new Issue({ + file: file, + evidence: headersEvidence(headers), + line: 1, + code: 217, + }), + ) + } + // if the sample_type column is missing, an error + // will be raised + else if (sampleTypeColumn === -1) { + issues.push( + new Issue({ + file: file, + evidence: headersEvidence(headers), + line: 1, + code: 218, + }), + ) + } else { + // otherwise, the samples should comprise of + // sample- and one sample per row + samples = [] + for (let l = 1; l < rows.length; l++) { + const row = rows[l] + // skip empty rows + if (!row || /^\s*$/.test(row)) { + continue + } + sampleIdColumnValues.push(row[sampleIdColumn]) + // check if any incorrect patterns in sample_id column + if (!row[sampleIdColumn].startsWith('sample-')) { + issues.push( + new Issue({ + file: file, + evidence: row[sampleIdColumn], + reason: 'sample_id column should be named ' + + 'as sample-.', + line: l, + code: 215, + }), + ) + } + + // check if a sample is described by one and only one row + const doesArrayHaveDuplicates = sampleIdColumnValues.some( + (val, i) => sampleIdColumnValues.indexOf(val) !== i + ) + if (doesArrayHaveDuplicates == true) { + issues.push( + new Issue({ + file: file, + evidence: sampleIdColumnValues, + reason: 'Each sample MUST be described by one and only one row.', + line: l, + code: 220, + }), + ) + } + } + // The participants should comprise of + // sub- and one subject per row + participants = [] + for (let l = 1; l < rows.length; l++) { + const row = rows[l] + // skip empty rows + if (!row || /^\s*$/.test(row)) { + continue + } + + // check if any incorrect patterns in participant_id column + if (!row[participantIdColumn].startsWith('sub-')) { + issues.push( + new Issue({ + file: file, + evidence: row[participantIdColumn], + reason: 'Participant_id column should be named ' + + 'as sub-.', + line: l, + code: 212, + }), + ) + } + + // obtain a list of the sample IDs in the samples.tsv file + const sample = row[sampleIdColumn].replace('sample-', '') + if 
(sample == 'emptyroom') { + continue + } + samples.push(sample) + } + } + + + // check if any incorrect patterns in sample_type column + for (let c = 1; c < rows.length; c++) { + const row = rows[c] + if ((row[sampleTypeColumn] != 'cell line') && + (row[sampleTypeColumn] != 'in vitro differentiated cells') && + (row[sampleTypeColumn] != 'primary cell') && + (row[sampleTypeColumn] != 'cell-free sample') && + (row[sampleTypeColumn] != 'cloning host') && + (row[sampleTypeColumn] != 'tissue') && + (row[sampleTypeColumn] != 'whole organisms') && + (row[sampleTypeColumn] != 'organoid') && + (row[sampleTypeColumn] != 'technical sample')) { + issues.push( + new Issue({ + file: file, + evidence: row[sampleTypeColumn], + reason: 'sample_type can\'t be any value.', + line: c, + code: 219, + }), + ) + } + } + } + + + + if ( file.relativePath.includes('/meg/') && file.name.endsWith('_channels.tsv') @@ -414,5 +560,4 @@ const TSV = (file, contents, fileList, callback) => { } callback(issues, participants, stimPaths) } - export default TSV From 0c1414ea7b03dcf9c98d06298eb667689bd26189 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 12 Aug 2021 12:00:09 -0400 Subject: [PATCH 044/101] The PixelSizeUnits now needs to be in um. Currently using the existing error code: 55 where a JSON field's value is not equal to one of the allowed values, so the code 221 is deleted. 
--- bids-validator/utils/issues/list.js | 6 ------ bids-validator/validators/json/schemas/microscopy.json | 2 +- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 274eb0d5c..624efdfc0 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1083,11 +1083,5 @@ export default { severity: 'error', reason: "Each sample MUST be described by one and only one row.", - }, - 221: { - key: 'PIXEL_SIZE_UNITS', - severity: 'error', - reason: - "PixelSizeUnits was not defined in micrometers (um) in the image's metadata.", } } diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index 754bdc594..ac6900849 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -38,7 +38,7 @@ "SampleExtractionInstitution": { "type": "string", "minLength": 1 }, "ShrinkageFactor": { "type": "number" }, "PixelSize": {"type": "array", "items": { "type": "number" } }, - "PixelSizeUnits": { "type": "string", "minLength": 1 }, + "PixelSizeUnits": { "type": "string", "enum": ["um"] }, "ImageAcquisitionProtocol": { "type": "string", "minLength": 1 }, "OtherAcquisitionParameters": { "type": "string", "minLength": 1 } }, From d1fe83c0c29319f679d827e00222d55b5d48308a Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Thu, 16 Sep 2021 12:24:27 -0400 Subject: [PATCH 045/101] Added a dictionary 'microscopy_json' in the 'file_level_rules.json' to manage the JSON name rules separately. Modified 'type.js' to import this new dictionary. Put the entity optional at the session and top level. 
--- .../rules/file_level_rules.json | 27 +++++++++++++++++-- .../rules/session_level_rules.json | 2 +- .../bids_validator/rules/top_level_rules.json | 2 +- bids-validator/utils/type.js | 11 ++++++-- 4 files changed, 36 insertions(+), 6 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index 8abc2d68e..a527fbfb5 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -551,8 +551,31 @@ ".ome\\.tif", ".ome\\.btf", ".tif", - ".png", - ".json" + ".png" + ] + } + }, + "microscopy_json": { + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)\\.json$", + "tokens": { + "@@@_microscopy_type_@@@": [ + "_TEM", + "_SEM", + "_CT", + "_BF", + "_DF", + "_PC", + "_DIC", + "_FLUO", + "_CONF", + "_PLI", + "_CARS", + "_2PE", + "_MPE", + "_SR", + "_NLO", + "_OCT", + "_SPIM" ] } } diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index 7f4cf040c..e956bf608 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -204,7 +204,7 @@ }, "microscopy_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_microscopy_ses_type_@@@)$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_microscopy_ses_type_@@@)$", 
"tokens": { "@@@_microscopy_ses_type_@@@": [ "_TEM.json", diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index d6aa08818..d3ffe05ac 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -138,7 +138,7 @@ }, "microscopy_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", + "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", "tokens": { "@@@_microscopy_top_ext_@@@": [ "_TEM\\.json", diff --git a/bids-validator/utils/type.js b/bids-validator/utils/type.js index eca3e8f17..5e4a988f0 100644 --- a/bids-validator/utils/type.js +++ b/bids-validator/utils/type.js @@ -60,6 +60,7 @@ const stimuliData = buildRegExp(file_level_rules.stimuli) const petData = buildRegExp(file_level_rules.pet) const petBlood = buildRegExp(file_level_rules.pet_blood) const microscopyData = buildRegExp(file_level_rules.microscopy) +const microscopyJSON = buildRegExp(file_level_rules.microscopy_json) // Phenotypic data const phenotypicData = buildRegExp(phenotypic_rules.phenotypic_data) // Session level @@ -114,7 +115,8 @@ export default { this.file.isPhenotypic(path) || this.file.isPET(path) || this.file.isPETBlood(path) || - this.file.isMicroscopy(path) + this.file.isMicroscopy(path) || + this.file.isMicroscopyJSON(path) ) }, @@ -350,6 +352,10 @@ export default { return conditionalMatch(microscopyData, path) }, + isMicroscopyJSON: function(path) { + return conditionalMatch(microscopyJSON, path) + }, + isBehavioral: function(path) { if (bids_schema) { return bids_schema.datatypes['beh'].some(regex => regex.exec(path)) @@ -377,7 +383,8 @@ export default { 
this.isFuncBold(path) || this.isPET(path) || this.isPETBlood(path) || - this.isMicroscopy(path) + this.isMicroscopy(path) || + this.isMicroscopyJSON(path) ) }, }, From 4a864dd78909b55a2cfc01e2fdb556bdb68343ec Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Sun, 19 Sep 2021 14:13:54 -0400 Subject: [PATCH 046/101] Added the Regex rule for a file named _photo.jpg. Added the JSON fields 'Immersion', 'NumericalAperture', 'Magnification', 'ChunkTransformationMatrix' and 'ChunkTransformationMatrixAxis'. --- .../bids_validator/rules/file_level_rules.json | 2 +- bids-validator/validators/json/schemas/microscopy.json | 10 ++++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index a527fbfb5..f68b857fd 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -526,7 +526,7 @@ }, "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@)$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))|(_photo\\.jpg))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index ac6900849..e57c6068d 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -39,8 +39,13 @@ 
"ShrinkageFactor": { "type": "number" }, "PixelSize": {"type": "array", "items": { "type": "number" } }, "PixelSizeUnits": { "type": "string", "enum": ["um"] }, + "Immersion": { "type": "string", "minLength": 1 }, + "NumericalAperture": { "type": "number" }, + "Magnification": { "type": "number" }, "ImageAcquisitionProtocol": { "type": "string", "minLength": 1 }, - "OtherAcquisitionParameters": { "type": "string", "minLength": 1 } + "OtherAcquisitionParameters": { "type": "string", "minLength": 1 }, + "ChunkTransformationMatrix": {"type": "array", "items": { "type": "array", "items":{ "type": "number"} } }, + "ChunkTransformationMatrixAxis": { "type": "array", "items": { "type": "string" } } }, "required": [ @@ -66,7 +71,8 @@ ], "dependencies": { - "PixelSize": ["PixelSizeUnits"] + "PixelSize": ["PixelSizeUnits"], + "ChunkTransformationMatrix": ["ChunkTransformationMatrixAxis"] } } From 43d9a8b5ee1f9d79bfcd1cbe9e9a982e8a2c19f1 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Tue, 28 Sep 2021 19:45:56 -0400 Subject: [PATCH 047/101] Entities order modified; at the end. 
--- bids-validator/bids_validator/rules/file_level_rules.json | 4 ++-- bids-validator/bids_validator/rules/session_level_rules.json | 2 +- bids-validator/bids_validator/rules/top_level_rules.json | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index f68b857fd..fbfbf407e 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -526,7 +526,7 @@ }, "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))|(_photo\\.jpg))$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))|(_photo\\.jpg))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", @@ -556,7 +556,7 @@ } }, "microscopy_json": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(@@@_microscopy_type_@@@)\\.json$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(@@@_microscopy_type_@@@)\\.json$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", diff --git 
a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index e956bf608..326ee54b6 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -204,7 +204,7 @@ }, "microscopy_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(@@@_microscopy_ses_type_@@@)$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(?:_chunk-[a-zA-Z0-9]+)?(@@@_microscopy_ses_type_@@@)$", "tokens": { "@@@_microscopy_ses_type_@@@": [ "_TEM.json", diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index d3ffe05ac..66a3f01b8 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -138,7 +138,7 @@ }, "microscopy_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", + "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", "tokens": { "@@@_microscopy_top_ext_@@@": [ "_TEM\\.json", From 23215c8ff34670b4dbc5fc351ee75dfea829e196 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Tue, 12 Oct 2021 20:01:09 -0400 Subject: [PATCH 048/101] Modified tsv.js for samples.tsv ; now two different subjects can have a sample named the same way without raising an error. 
--- bids-validator/utils/issues/list.js | 2 +- .../validators/json/schemas/microscopy.json | 2 +- bids-validator/validators/tsv/tsv.js | 54 +++++++++++++------ 3 files changed, 40 insertions(+), 18 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 624efdfc0..52e3b8cf9 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1082,6 +1082,6 @@ export default { key: 'SAMPLE_ID_DUPLICATE', severity: 'error', reason: - "Each sample MUST be described by one and only one row.", + "Each sample from a same subject MUST be described by one and only one row.", } } diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index e57c6068d..ac2a0282d 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -38,7 +38,7 @@ "SampleExtractionInstitution": { "type": "string", "minLength": 1 }, "ShrinkageFactor": { "type": "number" }, "PixelSize": {"type": "array", "items": { "type": "number" } }, - "PixelSizeUnits": { "type": "string", "enum": ["um"] }, + "PixelSizeUnits": { "type": "string", "enum": ["mm", "um", "nm"] }, "Immersion": { "type": "string", "minLength": 1 }, "NumericalAperture": { "type": "number" }, "Magnification": { "type": "number" }, diff --git a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index d942e7e47..3a53c8b53 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -246,6 +246,7 @@ const TSV = (file, contents, fileList, callback) => { let samples = null if (file.name === 'samples.tsv') { const sampleIdColumnValues = [] + const participantIdColumnValues = [] const sampleIdColumn = headers.indexOf('sample_id') const participantIdColumn = headers.indexOf('participant_id') const sampleTypeColumn = headers.indexOf('sample_type') @@ -296,6 +297,7 @@ const TSV = 
(file, contents, fileList, callback) => { continue } sampleIdColumnValues.push(row[sampleIdColumn]) + // check if any incorrect patterns in sample_id column if (!row[sampleIdColumn].startsWith('sample-')) { issues.push( @@ -309,22 +311,6 @@ const TSV = (file, contents, fileList, callback) => { }), ) } - - // check if a sample is described by one and only one row - const doesArrayHaveDuplicates = sampleIdColumnValues.some( - (val, i) => sampleIdColumnValues.indexOf(val) !== i - ) - if (doesArrayHaveDuplicates == true) { - issues.push( - new Issue({ - file: file, - evidence: sampleIdColumnValues, - reason: 'Each sample MUST be described by one and only one row.', - line: l, - code: 220, - }), - ) - } } // The participants should comprise of // sub- and one subject per row @@ -335,6 +321,7 @@ const TSV = (file, contents, fileList, callback) => { if (!row || /^\s*$/.test(row)) { continue } + participantIdColumnValues.push(row[participantIdColumn]) // check if any incorrect patterns in participant_id column if (!row[participantIdColumn].startsWith('sub-')) { @@ -349,6 +336,7 @@ const TSV = (file, contents, fileList, callback) => { }), ) } + // obtain a list of the sample IDs in the samples.tsv file const sample = row[sampleIdColumn].replace('sample-', '') @@ -357,6 +345,40 @@ const TSV = (file, contents, fileList, callback) => { } samples.push(sample) } + + // check if a sample froma same subject is described by one and only one row + var doesSampleIdHaveDuplicates = sampleIdColumnValues.some( + (val, i) => sampleIdColumnValues.indexOf(val) !== i + ) + + for (let r = 0; r < rows.length-1; r++) { + for (let l = 0; l < rows.length-1; l++) { + if (l == r) {} + else { + if (sampleIdColumnValues[r] == sampleIdColumnValues[l]) { + if (participantIdColumnValues[r] != participantIdColumnValues[l]){ + doesSampleIdHaveDuplicates = false + } + else { + doesSampleIdHaveDuplicates = true + } + } + } + } + } + if(doesSampleIdHaveDuplicates == true) { + issues.push( + new Issue({ + 
file: file, + evidence: sampleIdColumnValues, + reason: 'Each sample from a same subject MUST be described by one and only one row.', + line: 1, + code: 220, + }) + ) + } + else {} + } From 69c0950d50284e07429a016c201159193e3eb472 Mon Sep 17 00:00:00 2001 From: Etienne Bergeron Date: Wed, 20 Oct 2021 21:37:09 -0400 Subject: [PATCH 049/101] Completed the JSON fields ('TissueDeformationScaling' and 'SampleEnvironment') and the suffix 'uCT' modifications on the validator. --- .../rules/file_level_rules.json | 11 ++---- .../rules/session_level_rules.json | 2 +- .../bids_validator/rules/top_level_rules.json | 6 ++-- bids-validator/validators/json/json.js | 4 +-- .../validators/json/schemas/microscopy.json | 34 ++++++++++--------- package-lock.json | 4 +-- 6 files changed, 28 insertions(+), 33 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index fbfbf407e..f877e6a04 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -531,7 +531,7 @@ "@@@_microscopy_type_@@@": [ "_TEM", "_SEM", - "_CT", + "_uCT", "_BF", "_DF", "_PC", @@ -547,12 +547,7 @@ "_OCT", "_SPIM" ], - "@@@_microscopy_ext_@@@": [ - ".ome\\.tif", - ".ome\\.btf", - ".tif", - ".png" - ] + "@@@_microscopy_ext_@@@": [".ome\\.tif", ".ome\\.btf", ".tif", ".png"] } }, "microscopy_json": { @@ -561,7 +556,7 @@ "@@@_microscopy_type_@@@": [ "_TEM", "_SEM", - "_CT", + "_uCT", "_BF", "_DF", "_PC", diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index 326ee54b6..f3cfc3779 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -209,7 +209,7 @@ "@@@_microscopy_ses_type_@@@": [ "_TEM.json", "_SEM.json", - "_CT.json", + "_uCT.json", "_BF.json", "_DF.json", "_PC.json", diff --git 
a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index 66a3f01b8..cfffc6ea3 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -51,9 +51,7 @@ "pet_top": { "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:trc-[a-zA-Z0-9]+_)?(?:rec-[a-zA-Z0-9]+_)?(?:task-[a-zA-Z0-9]+_)?(?:run-[0-9]+_)?(@@@_pet_suffixes_@@@)\\.json$", "tokens": { - "@@@_pet_suffixes_@@@": [ - "pet" - ] + "@@@_pet_suffixes_@@@": ["pet"] } }, @@ -143,7 +141,7 @@ "@@@_microscopy_top_ext_@@@": [ "_TEM\\.json", "_SEM\\.json", - "_CT\\.json", + "_uCT\\.json", "_BF\\.json", "_DF\\.json", "_PC\\.json", diff --git a/bids-validator/validators/json/json.js b/bids-validator/validators/json/json.js index cbd696475..2e2b6b351 100644 --- a/bids-validator/validators/json/json.js +++ b/bids-validator/validators/json/json.js @@ -99,7 +99,7 @@ const selectSchema = file => { } else if ( file.name.endsWith('TEM.json') || file.name.endsWith('SEM.json') || - file.name.endsWith('CT.json') || + file.name.endsWith('uCT.json') || file.name.endsWith('BF.json') || file.name.endsWith('DF.json') || file.name.endsWith('PC.json') || @@ -114,7 +114,7 @@ const selectSchema = file => { file.name.endsWith('NLO.json') || file.name.endsWith('OCT.json') || file.name.endsWith('SPIM.json') - ) { + ) { schema = require('./schemas/microscopy.json') } else if ( file.relativePath.includes('/meg/') && diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index ac2a0282d..5f9e2e64e 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -3,31 +3,31 @@ "properties": { "Manufacturer": { "type": "string", "minLength": 1 }, "ManufacturersModelName": { "type": "string", "minLength": 1 }, - "DeviceSerialNumber": {"type": "string", "minLength": 1}, + 
"DeviceSerialNumber": { "type": "string", "minLength": 1 }, "StationName": { "type": "string", "minLength": 1 }, "SoftwareVersions": { "type": "string", "minLength": 1 }, "InstitutionName": { "type": "string", "minLength": 1 }, - "InstitutionAddress": { "type": "string", "minLength": 1 }, - "InstitutionalDepartmentName": { "type": "string", "minLength": 1 }, + "InstitutionAddress": { "type": "string", "minLength": 1 }, + "InstitutionalDepartmentName": { "type": "string", "minLength": 1 }, "BodyPart": { "type": "string", "minLength": 1 }, "BodyPartDetails": { "type": "string", "minLength": 1 }, "BodyPartDetailsOntology": { "type": "string", "minLength": 1 }, - "Environment": { "type": "string", "minLength": 1 }, + "SampleEnvironment": { "type": "string", "minLength": 1 }, "SampleEmbedding": { "type": "string", "minLength": 1 }, "SampleFixation": { "type": "string", "minLength": 1 }, - "SampleStaining": { + "SampleStaining": { "anyOf": [ { "type": "string", "minLength": 1 }, { "type": "array", "items": { "type": "string" } } ] }, - "SamplePrimaryAntibody": { + "SamplePrimaryAntibody": { "anyOf": [ { "type": "string", "minLength": 1 }, { "type": "array", "items": { "type": "string" } } ] }, - "SampleSecondaryAntibody": { + "SampleSecondaryAntibody": { "anyOf": [ { "type": "string", "minLength": 1 }, { "type": "array", "items": { "type": "string" } } @@ -36,22 +36,25 @@ "SliceThickness": { "type": "number" }, "SampleExtractionProtocol": { "type": "string", "minLength": 1 }, "SampleExtractionInstitution": { "type": "string", "minLength": 1 }, - "ShrinkageFactor": { "type": "number" }, - "PixelSize": {"type": "array", "items": { "type": "number" } }, + "TissueDeformationScaling": { "type": "number" }, + "PixelSize": { "type": "array", "items": { "type": "number" } }, "PixelSizeUnits": { "type": "string", "enum": ["mm", "um", "nm"] }, "Immersion": { "type": "string", "minLength": 1 }, "NumericalAperture": { "type": "number" }, "Magnification": { "type": "number" }, 
"ImageAcquisitionProtocol": { "type": "string", "minLength": 1 }, "OtherAcquisitionParameters": { "type": "string", "minLength": 1 }, - "ChunkTransformationMatrix": {"type": "array", "items": { "type": "array", "items":{ "type": "number"} } }, - "ChunkTransformationMatrixAxis": { "type": "array", "items": { "type": "string" } } + "ChunkTransformationMatrix": { + "type": "array", + "items": { "type": "array", "items": { "type": "number" } } + }, + "ChunkTransformationMatrixAxis": { + "type": "array", + "items": { "type": "string" } + } }, - "required": [ - "PixelSize", - "PixelSizeUnits" - ], + "required": ["PixelSize", "PixelSizeUnits"], "recommended": [ "Manufacturer", @@ -74,5 +77,4 @@ "PixelSize": ["PixelSizeUnits"], "ChunkTransformationMatrix": ["ChunkTransformationMatrixAxis"] } - } diff --git a/package-lock.json b/package-lock.json index 52e3722d4..bd189543c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ } }, "bids-validator": { - "version": "1.8.9-dev.0", + "version": "1.8.10-dev.0", "license": "MIT", "dependencies": { "@aws-sdk/client-s3": "^3.9.0", @@ -70,7 +70,7 @@ } }, "bids-validator-web": { - "version": "1.8.9-dev.0", + "version": "1.8.10-dev.0", "license": "MIT", "dependencies": { "bootstrap": "^4.3.0", From 61c98acf7cabbb520840cf174d9034c7ff6e8fe3 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Sat, 4 Dec 2021 23:00:30 -0700 Subject: [PATCH 050/101] Resolve some issues that mentioned in BEP031 pr changes document --- .../rules/file_level_rules.json | 24 +++++++++++++++---- .../rules/session_level_rules.json | 10 ++++++-- .../bids_validator/rules/top_level_rules.json | 6 +++++ .../validators/json/schemas/microscopy.json | 9 +++---- 4 files changed, 39 insertions(+), 10 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index f877e6a04..3cc608ebf 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ 
b/bids-validator/bids_validator/rules/file_level_rules.json @@ -377,7 +377,9 @@ "_channels\\.tsv", "_eeg\\.json", "_coordsystem\\.json", - "_photo\\.jpg" + "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", @@ -451,7 +453,9 @@ "_channels\\.tsv", "_ieeg\\.json", "_coordsystem\\.json", - "_photo\\.jpg" + "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", @@ -481,6 +485,8 @@ "_meg\\.json", "_coordsystem\\.json", "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif", "_headshape\\.pos", "_markers\\.(?:mrk|sqd)" ], @@ -526,7 +532,7 @@ }, "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))|(_photo\\.jpg))$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))(@@@_photo_ext_@@@))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", @@ -547,7 +553,17 @@ "_OCT", "_SPIM" ], - "@@@_microscopy_ext_@@@": [".ome\\.tif", ".ome\\.btf", ".tif", ".png"] + "@@@_microscopy_ext_@@@": [ + ".ome\\.tif", + ".ome\\.btf", + ".tif", + ".png" + ], + "@@@_photo_ext_@@@": [ + "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif" + ] } }, "microscopy_json": { diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index f3cfc3779..bdf7c04ea 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -77,6 +77,8 @@ "_meg.json", 
"_coordsystem.json", "_photo.jpg", + "_photo.png", + "_photo.tif", "_headshape.pos" ] } @@ -93,7 +95,9 @@ "_electrodes.json", "_eeg.json", "_coordsystem.json", - "_photo.jpg" + "_photo.jpg", + "_photo.png", + "_photo.tif" ], "@@@_eeg_space_@@@": [ "Other", @@ -156,7 +160,9 @@ "_electrodes.json", "_ieeg.json", "_coordsystem.json", - "_photo.jpg" + "_photo.jpg", + "_photo.png", + "_photo.tif" ], "@@@_ieeg_space_@@@": [ "Other", diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index cfffc6ea3..871f5331e 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -95,6 +95,8 @@ "_electrodes\\.tsv", "_electrodes\\.json", "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif", "_coordsystem\\.json" ] } @@ -109,6 +111,8 @@ "_electrodes\\.tsv", "_electrodes\\.json", "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif", "_coordsystem\\.json" ] } @@ -121,6 +125,8 @@ "_channels\\.tsv", "_channels\\.json", "_photo\\.jpg", + "_photo\\.png", + "_photo\\.tif", "_coordsystem\\.json" ] } diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index 5f9e2e64e..687262979 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -11,8 +11,8 @@ "InstitutionalDepartmentName": { "type": "string", "minLength": 1 }, "BodyPart": { "type": "string", "minLength": 1 }, "BodyPartDetails": { "type": "string", "minLength": 1 }, - "BodyPartDetailsOntology": { "type": "string", "minLength": 1 }, - "SampleEnvironment": { "type": "string", "minLength": 1 }, + "BodyPartDetailsOntology": { "type": "string", "minLength": 1, "format": "uri" }, + "SampleEnvironment": { "type": "string", "minLength": 1, "enum": ["in vivo", "ex vivo", "in vitro"] }, "SampleEmbedding": { "type": "string", "minLength": 1 }, 
"SampleFixation": { "type": "string", "minLength": 1 }, "SampleStaining": { @@ -65,12 +65,13 @@ "InstitutionName", "InstitutionAddress", "InstitutionalDepartmentName", - "BodyPart", + "BodyPartDetails", "Environment", "SampleStaining", "SamplePrimaryAntibody", - "SampleSecondaryAntibody" + "SampleSecondaryAntibody", + "ChunkTransformationMatrix" ], "dependencies": { From da45e7ba896db03abe69def5c89c3a58887d2499 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Sun, 5 Dec 2021 15:43:01 -0700 Subject: [PATCH 051/101] Resolve comments from PR --- .../bids_validator/rules/file_level_rules.json | 14 ++++---------- .../bids_validator/rules/session_level_rules.json | 10 ++-------- .../bids_validator/rules/top_level_rules.json | 6 ------ .../validators/json/schemas/microscopy.json | 2 +- 4 files changed, 7 insertions(+), 25 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index 3cc608ebf..c77403a2c 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -377,9 +377,7 @@ "_channels\\.tsv", "_eeg\\.json", "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif" + "_photo\\.jpg" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", @@ -453,9 +451,7 @@ "_channels\\.tsv", "_ieeg\\.json", "_coordsystem\\.json", - "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif" + "_photo\\.jpg" ], "@@@_cont_ext_@@@": [ "_physio\\.tsv\\.gz", @@ -485,8 +481,6 @@ "_meg\\.json", "_coordsystem\\.json", "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", "_headshape\\.pos", "_markers\\.(?:mrk|sqd)" ], @@ -532,7 +526,7 @@ }, "microscopy": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))(@@@_photo_ext_@@@))$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))(@@@_photo_ext_@@@))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", @@ -567,7 +561,7 @@ } }, "microscopy_json": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?microscopy[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(@@@_microscopy_type_@@@)\\.json$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(@@@_microscopy_type_@@@)\\.json$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index bdf7c04ea..f3cfc3779 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -77,8 +77,6 @@ "_meg.json", "_coordsystem.json", "_photo.jpg", - "_photo.png", - "_photo.tif", "_headshape.pos" ] } @@ -95,9 +93,7 @@ "_electrodes.json", "_eeg.json", "_coordsystem.json", - "_photo.jpg", - "_photo.png", - "_photo.tif" + "_photo.jpg" ], "@@@_eeg_space_@@@": [ "Other", @@ -160,9 +156,7 @@ "_electrodes.json", 
"_ieeg.json", "_coordsystem.json", - "_photo.jpg", - "_photo.png", - "_photo.tif" + "_photo.jpg" ], "@@@_ieeg_space_@@@": [ "Other", diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index 871f5331e..cfffc6ea3 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -95,8 +95,6 @@ "_electrodes\\.tsv", "_electrodes\\.json", "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", "_coordsystem\\.json" ] } @@ -111,8 +109,6 @@ "_electrodes\\.tsv", "_electrodes\\.json", "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", "_coordsystem\\.json" ] } @@ -125,8 +121,6 @@ "_channels\\.tsv", "_channels\\.json", "_photo\\.jpg", - "_photo\\.png", - "_photo\\.tif", "_coordsystem\\.json" ] } diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index 687262979..76cd57b2f 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -67,7 +67,7 @@ "InstitutionalDepartmentName", "BodyPart", "BodyPartDetails", - "Environment", + "SampleEnvironment", "SampleStaining", "SamplePrimaryAntibody", "SampleSecondaryAntibody", From 39543b711f78b06cf35ec60d1c8abd3a07459042 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Sun, 5 Dec 2021 23:38:59 -0700 Subject: [PATCH 052/101] Fix microscopy regexp --- bids-validator/bids_validator/rules/file_level_rules.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index c77403a2c..f4df738b7 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -526,7 +526,7 @@ }, "microscopy": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))(@@@_photo_ext_@@@))$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))|(@@@_photo_ext_@@@))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", From e48f955ec88f1d13f6281ea0ca6fe8af4a087f36 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Mon, 6 Dec 2021 23:38:49 -0700 Subject: [PATCH 053/101] Remove "ChunkTransformationMatrix" from the recommended list --- bids-validator/validators/json/schemas/microscopy.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index 76cd57b2f..3ac2ce639 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -70,8 +70,7 @@ "SampleEnvironment", "SampleStaining", "SamplePrimaryAntibody", - "SampleSecondaryAntibody", - "ChunkTransformationMatrix" + "SampleSecondaryAntibody" ], "dependencies": { From fbc6fc7041fc3598047d68ec8b8e130cd54242fc Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Tue, 7 Dec 2021 17:44:02 -0700 Subject: [PATCH 054/101] Add regexp for _photo ext in microscopy --- .../bids_validator/rules/file_level_rules.json | 9 +++++++-- bids-validator/utils/type.js | 10 +++++++--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index f4df738b7..957d6dcfe 100644 --- 
a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -526,7 +526,7 @@ }, "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))|(@@@_photo_ext_@@@))$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", @@ -552,7 +552,12 @@ ".ome\\.btf", ".tif", ".png" - ], + ] + } + }, + "microscopy_photo": { + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(@@@_photo_ext_@@@)$", + "tokens":{ "@@@_photo_ext_@@@": [ "_photo\\.jpg", "_photo\\.png", diff --git a/bids-validator/utils/type.js b/bids-validator/utils/type.js index 5e4a988f0..a2cb15203 100644 --- a/bids-validator/utils/type.js +++ b/bids-validator/utils/type.js @@ -60,6 +60,7 @@ const stimuliData = buildRegExp(file_level_rules.stimuli) const petData = buildRegExp(file_level_rules.pet) const petBlood = buildRegExp(file_level_rules.pet_blood) const microscopyData = buildRegExp(file_level_rules.microscopy) +const microscopyPhotoData = buildRegExp(file_level_rules.microscopy_photo) const microscopyJSON = buildRegExp(file_level_rules.microscopy_json) // Phenotypic data const phenotypicData = buildRegExp(phenotypic_rules.phenotypic_data) @@ -349,7 +350,10 @@ export default { }, isMicroscopy: function(path) { - return conditionalMatch(microscopyData, path) + return ( + 
conditionalMatch(microscopyData, path) || + conditionalMatch(microscopyPhotoData, path) + ) }, isMicroscopyJSON: function(path) { @@ -390,10 +394,10 @@ export default { }, checkType(obj, typeString) { - if (typeString == 'number') { + if (typeString === 'number') { return !isNaN(parseFloat(obj)) && isFinite(obj) } else { - return typeof obj == typeString + return typeof obj === typeString } }, From 6c27e349dfff4d72ac424cf78517096ec9977451 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Wed, 8 Dec 2021 12:32:21 -0700 Subject: [PATCH 055/101] Revert changes --- bids-validator/utils/type.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bids-validator/utils/type.js b/bids-validator/utils/type.js index a2cb15203..0c15168e5 100644 --- a/bids-validator/utils/type.js +++ b/bids-validator/utils/type.js @@ -394,10 +394,10 @@ export default { }, checkType(obj, typeString) { - if (typeString === 'number') { + if (typeString == 'number') { return !isNaN(parseFloat(obj)) && isFinite(obj) } else { - return typeof obj === typeString + return typeof obj == typeString } }, From 270906e9ca95f117d9781c1ce416e7f8e9dc3dd0 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Sun, 19 Dec 2021 23:05:32 -0700 Subject: [PATCH 056/101] 1. Add Check for consistency for PixelSize between JSON and OME-TIFF metadata 2. Add Check for consistency with optional OME-TIFF metadata if present for Immersion, NumericalAperture and Magnification 3. 
Add check for ChunkTransformationMatrix field in the JSON file --- bids-validator/utils/issues/list.js | 50 +++- .../validators/bids/checkConsistency.js | 228 ++++++++++++++++++ bids-validator/validators/bids/fullTest.js | 11 +- .../validators/bids/groupFileTypes.js | 4 + package-lock.json | 36 +++ 5 files changed, 318 insertions(+), 11 deletions(-) create mode 100644 bids-validator/validators/bids/checkConsistency.js diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 52e3b8cf9..c6b4b41d7 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1057,31 +1057,63 @@ export default { 216: { key: 'SAMPLE_ID_COLUMN', severity: 'error', - reason: - "Samples .tsv files must have a 'sample_id' column.", + reason: "Samples .tsv files must have a 'sample_id' column.", }, 217: { key: 'PARTICIPANT_ID_COLUMN', severity: 'error', - reason: - "Samples .tsv files must have a 'participant_id' column.", + reason: "Samples .tsv files must have a 'participant_id' column.", }, 218: { key: 'SAMPLE_TYPE_COLUMN', severity: 'error', - reason: - "Samples .tsv files must have a 'sample_type' column.", + reason: "Samples .tsv files must have a 'sample_type' column.", }, 219: { key: 'SAMPLE_TYPE_VALUE', severity: 'error', reason: - "sample_type MUST consist of one of the following values: cell line, in vitro differentiated cells, primary cell, cell-free sample, cloning host, tissue, whole organisms, organoid or technical sample.", + 'sample_type MUST consist of one of the following values: cell line, in vitro differentiated cells, primary cell, cell-free sample, cloning host, tissue, whole organisms, organoid or technical sample.', }, 220: { key: 'SAMPLE_ID_DUPLICATE', severity: 'error', reason: - "Each sample from a same subject MUST be described by one and only one row.", - } + 'Each sample from a same subject MUST be described by one and only one row.', + }, + 221: { + key: 'PIXEL_SIZE_INCONSISTENT', + severity: 
'error', + reason: + 'PixelSize need to be consistent with PhysicalSizeX, PhysicalSizeY and PhysicalSizeZ OME metadata fields', + }, + 222: { + key: 'INVALID_PIXEL_SIZE_UNIT', + severity: 'error', + reason: 'PixelSize consistency is only validated for "mm", "µm" and "nm".', + }, + 223: { + key: 'NO_VALID_JSON_FILES', + severity: 'error', + reason: + 'No enough JSON files used to check PixelSize OME metadata fields consistency', + }, + 224: { + key: 'CHUNK_TRANSFORMATION_MATRIX_MISSING', + severity: 'warning', + reason: + "It is recommended to define 'ChunkTransformationMatrix' for this file.", + }, + 225: { + key: 'OPTIONAL_FIELDS_NON_EXIST', + severity: 'error', + reason: + 'The optional fields are present in the JSON file but not found in the OME-TIFF file', + }, + 226: { + key: 'OPTIONAL_FIELDS_INCONSISTENT', + severity: 'error', + reason: + 'The optional fields are not consistent between the OME-TIFF files and JSON files', + }, } diff --git a/bids-validator/validators/bids/checkConsistency.js b/bids-validator/validators/bids/checkConsistency.js new file mode 100644 index 000000000..c1a00d5e9 --- /dev/null +++ b/bids-validator/validators/bids/checkConsistency.js @@ -0,0 +1,228 @@ +import utils from '../../utils' +import isNode from '../../utils/isNode' +import ExifReader from 'exifreader' +const xml2js = require('xml2js') +const Issue = require('../../utils').issues.Issue + +const getOMETiffData = async omeFile => { + let tags + if (isNode) { + tags = await ExifReader.load(omeFile.path) + } else { + const arrayBuffer = await toArrayBuffer(omeFile) + tags = await ExifReader.load(arrayBuffer) + } + let xml = tags['ImageDescription']['description'] + let parser = new xml2js.Parser() + return await parser.parseStringPromise(xml) +} + +const toArrayBuffer = async file => { + return new Promise((resolve, reject) => { + try { + const reader = new FileReader() + reader.onload = event => { + resolve(event.target.result) + } + + reader.readAsArrayBuffer(file) + } catch 
(e) { + reject(e) + } + }) +} + +const convertFactor = (omeUnit, jsonUnit) => { + if (omeUnit === jsonUnit || (omeUnit === 'µm' && jsonUnit === 'um')) return 1 + + if (jsonUnit === 'um') { + if (omeUnit === 'mm') { + return 1000 + } else if (omeUnit === 'nm') { + return 0.001 + } + } else if (jsonUnit === 'mm') { + if (omeUnit === 'µm') { + return 0.001 + } else if (omeUnit === 'nm') { + return 0.000001 + } + } else if (jsonUnit === 'nm') { + if (omeUnit === 'mm') { + return 1000000 + } else if (omeUnit === 'µm') { + return 1000 + } + } +} + +const getMatchingComponents = (omeFiles, jsonFiles) => { + let components = [] + omeFiles.forEach(omeFile => { + let possibleJsonPath = omeFile.relativePath + .replace('.tif', '') + .replace('.ome', '.json') + + let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) + + // Find the path before filename + let regExp = new RegExp('(.*/).*') + let preFilename = regExp.exec(possibleJsonPath)[1] + + // Retrieve all json files at the same path with ome-tiff files + let jsonPaths = potentialSidecars.filter(path => { + let jsonpath = regExp.exec(path)[1] + return jsonpath === preFilename + }) + + // if possible json paths are not empty + jsonFiles.forEach(jsonFile => { + // if possible json file exists + if (jsonPaths.includes(jsonFile.relativePath)) { + components.push({ + omeFile: omeFile, + jsonFilePath: jsonFile.relativePath, + }) + } + }) + }) + + return components +} + +const checkConsistency = (omeFiles, jsonFiles, jsonContentsDict) => { + let issues = [] + + let components = getMatchingComponents(omeFiles, jsonFiles) + + // if at least one ome-tiff file has no corresponding json file + if (components.length !== omeFiles.length) { + issues.push(new Issue({ code: 223 })) + } + + const omeIssuesPromises = components.map(component => { + return utils.limit( + () => + new Promise(async resolve => { + let jsonData = jsonContentsDict[component.jsonFilePath] + let omeData = await getOMETiffData(component.omeFile) + 
let optionalFieldsIssues = await validateOptionalFields( + omeData, + jsonData, + ) + let pixelSizeIssues = await validatePixelSize(omeData, jsonData) + let matrixIssues = validateChunkTransformationMatrix( + component.jsonFilePath, + component.omeFile.path, + jsonData, + ) + issues = issues + .concat(optionalFieldsIssues) + .concat(pixelSizeIssues) + .concat(matrixIssues) + return resolve() + }), + ) + }) + + return new Promise(resolve => + Promise.all(omeIssuesPromises).then(() => resolve(issues)), + ) +} + +const validateOptionalFields = async (omeData, jsonData) => { + let issues = [] + + let fields = { + Immersion: 'Immersion', + NumericalAperture: 'LensNA', + NominalMagnification: 'Magnification', + } + + if ( + omeData['OME']['Instrument'] && + omeData['OME']['Instrument'][0]['Objective'] + ) { + let objective = omeData['OME']['Instrument'][0]['Objective'][0]['$'] + for (let field in fields) { + let property = fields[field] + if (jsonData.hasOwnProperty(field) && !objective[property]) { + issues.push(new Issue({ code: 225 })) + } else if (jsonData.hasOwnProperty(field) && objective[property]) { + if (objective[property] != jsonData[field]) { + issues.push(new Issue({ code: 226 })) + } + } + } + } + + return issues +} + +const validateChunkTransformationMatrix = ( + jsonFilePath, + omeFilePath, + jsonData, +) => { + let issues = [] + + /*if chunk- is used either in the filenames of + ome-tiff or JSON files, 'ChunkTransformationMatrix' is recommended*/ + let regex = new RegExp('_chunk-[a-zA-Z0-9]+') + + if (regex.exec(jsonFilePath) || regex.exec(omeFilePath)) { + if (!jsonData.hasOwnProperty('ChunkTransformationMatrix')) { + issues.push(new Issue({ code: 224 })) + } + } + + return issues +} + +const validatePixelSize = async (omeData, jsonData) => { + let issues = [] + let validUnits = ['um', 'µm', 'nm', 'mm'] + + const PhysicalSizeX = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeX'] + const physicalSizeXUnit = + 
omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeXUnit'] + const PhysicalSizeY = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeY'] + const physicalSizeYUnit = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeYUnit'] + const PhysicalSizeZ = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZ'] + const physicalSizeZUnit = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZUnit'] + + let pixelSize = jsonData['PixelSize'] + let physicalSizeUnit = jsonData['PixelSizeUnits'] + + let unitsPendToCheck = [ + physicalSizeXUnit, + physicalSizeYUnit, + physicalSizeZUnit, + ] + + unitsPendToCheck.forEach(unit => { + if (!validUnits.includes(unit)) { + issues.push(new Issue({ code: 222 })) + } + }) + + let factorX = convertFactor(physicalSizeXUnit, physicalSizeUnit) + let factorY = convertFactor(physicalSizeYUnit, physicalSizeUnit) + let factorZ = convertFactor(physicalSizeZUnit, physicalSizeUnit) + + if ( + PhysicalSizeX * factorX !== pixelSize[0] || + PhysicalSizeY * factorY !== pixelSize[1] || + PhysicalSizeZ * factorZ !== pixelSize[2] + ) { + issues.push(new Issue({ code: 221 })) + } + + return issues +} +export default checkConsistency diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 138e4ed4b..9e6ca510d 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -19,11 +19,12 @@ import checkSamples from './checkSamples' import validateMisc from '../../utils/files/validateMisc' import collectSubjectMetadata from '../../utils/summary/collectSubjectMetadata' import collectPetFields from '../../utils/summary/collectPetFields' +import checkConsistency from './checkConsistency' /** * Full Test * - * Takes on an array of files, callback, and boolean inidicating if git-annex is used. + * Takes on an array of files, callback, and boolean indicating if git-annex is used. * Starts the validation process for a BIDS package. 
*/ const fullTest = (fileList, options, annexed, dir, schema, callback) => { @@ -145,12 +146,18 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { if (summary.modalities.includes('Microscopy')) { const samplesIssues = checkSamples(fileList) self.issues = self.issues.concat(samplesIssues) - } + } // Validate json files and contents return json.validate(jsonFiles, fileList, jsonContentsDict, summary) }) .then(jsonIssues => { self.issues = self.issues.concat(jsonIssues) + + // ome-tiff consistency check + return checkConsistency(files.ome, files.json, jsonContentsDict) + }) + .then(omeIssues => { + self.issues = self.issues.concat(omeIssues) // Nifti validation return NIFTI.validate( files.nifti, diff --git a/bids-validator/validators/bids/groupFileTypes.js b/bids-validator/validators/bids/groupFileTypes.js index 42c4a1bb3..44812c498 100644 --- a/bids-validator/validators/bids/groupFileTypes.js +++ b/bids-validator/validators/bids/groupFileTypes.js @@ -11,6 +11,7 @@ const groupFileTypes = (fileList, options) => { bvec: [], contRecord: [], invalid: [], + ome: [], // used to check all files not already passed through testFile() misc: [], } @@ -52,6 +53,9 @@ const sortFiles = (fileList, options, files) => { files.misc.push(file) } else if (ofType(filename, 'tsv.gz')) { files.contRecord.push(file) + } else if (ofType(filename, 'ome.tif')) { + // collect ome-tiff + files.ome.push(file) } else { files.misc.push(file) } diff --git a/package-lock.json b/package-lock.json index bd189543c..0e921ed78 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,6 +9,10 @@ "bids-validator", "bids-validator-web" ], + "dependencies": { + "exifreader": "^4.1.0", + "xml2js": "^0.4.23" + }, "devDependencies": { "@babel/core": "^7.7.2", "@babel/preset-env": "^7.7.1", @@ -6568,6 +6572,15 @@ "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", "dev": true }, + "node_modules/@xmldom/xmldom": { + "version": 
"0.7.5", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.7.5.tgz", + "integrity": "sha512-V3BIhmY36fXZ1OtVcI9W+FxQqxVLsPKcNjWigIaa81dLC9IolJl5Mt4Cvhmr0flUnjSpTdrbMTSbXqYqV5dT6A==", + "optional": true, + "engines": { + "node": ">=10.0.0" + } + }, "node_modules/abab": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", @@ -9833,6 +9846,15 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, + "node_modules/exifreader": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/exifreader/-/exifreader-4.1.0.tgz", + "integrity": "sha512-LzTW96ubaHRSWVD6bgANpZgWGHdtA/jsIdVjFVhDDN6k60wid8U6b3cIWSGTfRePjZlwvyt4nt12bIQ5ywUrBw==", + "hasInstallScript": true, + "optionalDependencies": { + "@xmldom/xmldom": "^0.7.5" + } + }, "node_modules/exit": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", @@ -27179,6 +27201,12 @@ "integrity": "sha512-7tFImggNeNBVMsn0vLrpn1H1uPrUBdnARPTpZoitY37ZrdJREzf7I16tMrlK3hen349gr1NYh8CmZQa7CTG6Aw==", "dev": true }, + "@xmldom/xmldom": { + "version": "0.7.5", + "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.7.5.tgz", + "integrity": "sha512-V3BIhmY36fXZ1OtVcI9W+FxQqxVLsPKcNjWigIaa81dLC9IolJl5Mt4Cvhmr0flUnjSpTdrbMTSbXqYqV5dT6A==", + "optional": true + }, "abab": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.5.tgz", @@ -29895,6 +29923,14 @@ "strip-final-newline": "^2.0.0" } }, + "exifreader": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/exifreader/-/exifreader-4.1.0.tgz", + "integrity": "sha512-LzTW96ubaHRSWVD6bgANpZgWGHdtA/jsIdVjFVhDDN6k60wid8U6b3cIWSGTfRePjZlwvyt4nt12bIQ5ywUrBw==", + "requires": { + "@xmldom/xmldom": "^0.7.5" + } + }, "exit": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", From 85b9ef9a163f9d390126473c6259e8de75d50ccd Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Tue, 21 Dec 2021 15:47:33 -0700 Subject: [PATCH 
057/101] Resolve comments --- bids-validator/utils/issues/list.js | 10 ++-------- bids-validator/validators/bids/checkConsistency.js | 11 ++++++----- bids-validator/validators/bids/groupFileTypes.js | 2 +- 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index c6b4b41d7..375b47042 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1089,7 +1089,7 @@ export default { }, 222: { key: 'INVALID_PIXEL_SIZE_UNIT', - severity: 'error', + severity: 'warning', reason: 'PixelSize consistency is only validated for "mm", "µm" and "nm".', }, 223: { @@ -1105,13 +1105,7 @@ export default { "It is recommended to define 'ChunkTransformationMatrix' for this file.", }, 225: { - key: 'OPTIONAL_FIELDS_NON_EXIST', - severity: 'error', - reason: - 'The optional fields are present in the JSON file but not found in the OME-TIFF file', - }, - 226: { - key: 'OPTIONAL_FIELDS_INCONSISTENT', + key: 'OPTIONAL_FIELD_INCONSISTENT', severity: 'error', reason: 'The optional fields are not consistent between the OME-TIFF files and JSON files', diff --git a/bids-validator/validators/bids/checkConsistency.js b/bids-validator/validators/bids/checkConsistency.js index c1a00d5e9..339367412 100644 --- a/bids-validator/validators/bids/checkConsistency.js +++ b/bids-validator/validators/bids/checkConsistency.js @@ -136,7 +136,7 @@ const validateOptionalFields = async (omeData, jsonData) => { let fields = { Immersion: 'Immersion', NumericalAperture: 'LensNA', - NominalMagnification: 'Magnification', + Magnification: 'NominalMagnification', } if ( @@ -146,11 +146,9 @@ const validateOptionalFields = async (omeData, jsonData) => { let objective = omeData['OME']['Instrument'][0]['Objective'][0]['$'] for (let field in fields) { let property = fields[field] - if (jsonData.hasOwnProperty(field) && !objective[property]) { - issues.push(new Issue({ code: 225 })) - } else if 
(jsonData.hasOwnProperty(field) && objective[property]) { + if (jsonData.hasOwnProperty(field) && objective[property]) { if (objective[property] != jsonData[field]) { - issues.push(new Issue({ code: 226 })) + issues.push(new Issue({ code: 225 })) } } } @@ -211,6 +209,9 @@ const validatePixelSize = async (omeData, jsonData) => { } }) + // if any physicalSizeUnit is not valid, skip the consistency check + if (issues) return issues + let factorX = convertFactor(physicalSizeXUnit, physicalSizeUnit) let factorY = convertFactor(physicalSizeYUnit, physicalSizeUnit) let factorZ = convertFactor(physicalSizeZUnit, physicalSizeUnit) diff --git a/bids-validator/validators/bids/groupFileTypes.js b/bids-validator/validators/bids/groupFileTypes.js index 44812c498..eb4f4ff1b 100644 --- a/bids-validator/validators/bids/groupFileTypes.js +++ b/bids-validator/validators/bids/groupFileTypes.js @@ -53,7 +53,7 @@ const sortFiles = (fileList, options, files) => { files.misc.push(file) } else if (ofType(filename, 'tsv.gz')) { files.contRecord.push(file) - } else if (ofType(filename, 'ome.tif')) { + } else if (ofType(filename, 'ome.tif') || ofType(filename, 'ome.btf')) { // collect ome-tiff files.ome.push(file) } else { From 439f5c14ef3b8791c4d63d20315a5166e4e1ca06 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Wed, 22 Dec 2021 18:35:51 -0700 Subject: [PATCH 058/101] Resolve comments --- bids-validator/utils/issues/list.js | 16 ++--- .../validators/bids/checkConsistency.js | 70 +++++++++---------- bids-validator/validators/bids/fullTest.js | 2 +- 3 files changed, 40 insertions(+), 48 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 375b47042..c687ddf65 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1093,21 +1093,19 @@ export default { reason: 'PixelSize consistency is only validated for "mm", "µm" and "nm".', }, 223: { - key: 'NO_VALID_JSON_FILES', - severity: 'error', - reason: - 'No 
enough JSON files used to check PixelSize OME metadata fields consistency', - }, - 224: { key: 'CHUNK_TRANSFORMATION_MATRIX_MISSING', severity: 'warning', reason: "It is recommended to define 'ChunkTransformationMatrix' for this file.", }, - 225: { + 224: { key: 'OPTIONAL_FIELD_INCONSISTENT', severity: 'error', - reason: - 'The optional fields are not consistent between the OME-TIFF files and JSON files', + reason: 'Optional JSON field is not consistent with the OME-TIFF metadata', + }, + 225: { + key: 'NO_VALID_JSON', + severity: 'error', + reason: 'No valid JSON file found for this file', }, } diff --git a/bids-validator/validators/bids/checkConsistency.js b/bids-validator/validators/bids/checkConsistency.js index 339367412..9cda92b2b 100644 --- a/bids-validator/validators/bids/checkConsistency.js +++ b/bids-validator/validators/bids/checkConsistency.js @@ -56,7 +56,7 @@ const convertFactor = (omeUnit, jsonUnit) => { } } -const getMatchingComponents = (omeFiles, jsonFiles) => { +const getMatchingComponents = (omeFiles, jsonContentsDict) => { let components = [] omeFiles.forEach(omeFile => { let possibleJsonPath = omeFile.relativePath @@ -65,35 +65,24 @@ const getMatchingComponents = (omeFiles, jsonFiles) => { let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) - // Find the path before filename - let regExp = new RegExp('(.*/).*') - let preFilename = regExp.exec(possibleJsonPath)[1] - - // Retrieve all json files at the same path with ome-tiff files - let jsonPaths = potentialSidecars.filter(path => { - let jsonpath = regExp.exec(path)[1] - return jsonpath === preFilename - }) + const mergedDictionary = utils.files.generateMergedSidecarDict( + potentialSidecars, + jsonContentsDict, + ) - // if possible json paths are not empty - jsonFiles.forEach(jsonFile => { - // if possible json file exists - if (jsonPaths.includes(jsonFile.relativePath)) { - components.push({ - omeFile: omeFile, - jsonFilePath: jsonFile.relativePath, - }) - } + 
components.push({ + omeFile: omeFile, + jsonData: mergedDictionary, }) }) return components } -const checkConsistency = (omeFiles, jsonFiles, jsonContentsDict) => { +const checkConsistency = (omeFiles, jsonContentsDict) => { let issues = [] - let components = getMatchingComponents(omeFiles, jsonFiles) + let components = getMatchingComponents(omeFiles, jsonContentsDict) // if at least one ome-tiff file has no corresponding json file if (components.length !== omeFiles.length) { @@ -104,22 +93,15 @@ const checkConsistency = (omeFiles, jsonFiles, jsonContentsDict) => { return utils.limit( () => new Promise(async resolve => { - let jsonData = jsonContentsDict[component.jsonFilePath] + let jsonData = component.jsonData let omeData = await getOMETiffData(component.omeFile) let optionalFieldsIssues = await validateOptionalFields( + component.omeFile.relativePath, omeData, jsonData, ) let pixelSizeIssues = await validatePixelSize(omeData, jsonData) - let matrixIssues = validateChunkTransformationMatrix( - component.jsonFilePath, - component.omeFile.path, - jsonData, - ) - issues = issues - .concat(optionalFieldsIssues) - .concat(pixelSizeIssues) - .concat(matrixIssues) + issues = issues.concat(optionalFieldsIssues).concat(pixelSizeIssues) return resolve() }), ) @@ -130,7 +112,7 @@ const checkConsistency = (omeFiles, jsonFiles, jsonContentsDict) => { ) } -const validateOptionalFields = async (omeData, jsonData) => { +const validateOptionalFields = async (omePath, omeData, jsonData) => { let issues = [] let fields = { @@ -148,7 +130,16 @@ const validateOptionalFields = async (omeData, jsonData) => { let property = fields[field] if (jsonData.hasOwnProperty(field) && objective[property]) { if (objective[property] != jsonData[field]) { - issues.push(new Issue({ code: 225 })) + issues.push( + new Issue({ + file: { + relativePath: omePath, + path: omePath, + }, + evidence: `JSON field '${field}' is inconsistent`, + code: 224, + }), + ) } } } @@ -170,7 +161,7 @@ const 
validateChunkTransformationMatrix = ( if (regex.exec(jsonFilePath) || regex.exec(omeFilePath)) { if (!jsonData.hasOwnProperty('ChunkTransformationMatrix')) { - issues.push(new Issue({ code: 224 })) + issues.push(new Issue({ code: 223 })) } } @@ -194,8 +185,8 @@ const validatePixelSize = async (omeData, jsonData) => { const physicalSizeZUnit = omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZUnit'] - let pixelSize = jsonData['PixelSize'] - let physicalSizeUnit = jsonData['PixelSizeUnits'] + // if no corresponding json file + if (Object.keys(jsonData).length === 0) issues.push(new Issue({ code: 225 })) let unitsPendToCheck = [ physicalSizeXUnit, @@ -209,8 +200,11 @@ const validatePixelSize = async (omeData, jsonData) => { } }) - // if any physicalSizeUnit is not valid, skip the consistency check - if (issues) return issues + // if any physicalSizeUnit is not valid or no valid json file, skip the consistency check + if (issues.length > 0) return issues + + let pixelSize = jsonData['PixelSize'] + let physicalSizeUnit = jsonData['PixelSizeUnits'] let factorX = convertFactor(physicalSizeXUnit, physicalSizeUnit) let factorY = convertFactor(physicalSizeYUnit, physicalSizeUnit) diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 9e6ca510d..e5a29fb69 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -154,7 +154,7 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { self.issues = self.issues.concat(jsonIssues) // ome-tiff consistency check - return checkConsistency(files.ome, files.json, jsonContentsDict) + return checkConsistency(files.ome, jsonContentsDict) }) .then(omeIssues => { self.issues = self.issues.concat(omeIssues) From 831e9413955d4caed6ac61fdfb180a4816781c36 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Thu, 23 Dec 2021 16:06:44 -0700 Subject: [PATCH 059/101] Resolve comments --- 
.../files/generateMergedSidecarWithPath.js | 26 ++ bids-validator/utils/files/index.js | 4 + bids-validator/utils/files/readOMEFile.js | 33 +++ .../validators/bids/checkConsistency.js | 223 ------------------ bids-validator/validators/bids/fullTest.js | 15 +- .../validators/bids/groupFileTypes.js | 6 + .../validators/ometiff/checkJSONAndField.js | 83 +++++++ bids-validator/validators/ometiff/index.js | 11 + bids-validator/validators/ometiff/ometiff.js | 158 +++++++++++++ bids-validator/validators/ometiff/validate.js | 31 +++ 10 files changed, 362 insertions(+), 228 deletions(-) create mode 100644 bids-validator/utils/files/generateMergedSidecarWithPath.js create mode 100644 bids-validator/utils/files/readOMEFile.js delete mode 100644 bids-validator/validators/bids/checkConsistency.js create mode 100644 bids-validator/validators/ometiff/checkJSONAndField.js create mode 100644 bids-validator/validators/ometiff/index.js create mode 100644 bids-validator/validators/ometiff/ometiff.js create mode 100644 bids-validator/validators/ometiff/validate.js diff --git a/bids-validator/utils/files/generateMergedSidecarWithPath.js b/bids-validator/utils/files/generateMergedSidecarWithPath.js new file mode 100644 index 000000000..78e9fbe01 --- /dev/null +++ b/bids-validator/utils/files/generateMergedSidecarWithPath.js @@ -0,0 +1,26 @@ +/** + * Generate Merged Sidecar Dictionary + * + * Takes an array of potential sidecars and a + * master object dictionary of all JSON file + * content and returns a merged dictionary + * containing all values from the potential + * sidecars and the sidecarName + */ +function generateMergedSidecarDictWithPath(potentialSidecars, jsonContents) { + let mergedDictionary = {} + potentialSidecars.map(sidecarName => { + const jsonObject = jsonContents[sidecarName] + if (jsonObject) { + mergedDictionary['sidecarName'] = sidecarName + for (var key in jsonObject) { + mergedDictionary[key] = jsonObject[key] + } + } else if (jsonObject === null) {
mergedDictionary.invalid = true + } + }) + return mergedDictionary +} + +export default generateMergedSidecarDictWithPath diff --git a/bids-validator/utils/files/index.js b/bids-validator/utils/files/index.js index 02e7f48b1..2e65ad4c1 100644 --- a/bids-validator/utils/files/index.js +++ b/bids-validator/utils/files/index.js @@ -3,6 +3,7 @@ import FileAPI from './FileAPI' import newFile from './newFile' import readFile from './readFile' +import readOMEFile from './readOMEFile' import readNiftiHeader from './readNiftiHeader' import readDir from './readDir' import potentialLocations from './potentialLocations' @@ -13,6 +14,7 @@ import illegalCharacterTest from './illegalCharacterTest' import sessions from './sessions' import remoteFiles from './remoteFiles' import getFileStats from './getFileStats' +import generateMergedSidecarDictWithPath from './generateMergedSidecarWithPath' // public API --------------------------------------------------------------------- @@ -21,8 +23,10 @@ export default { newFile, readFile, readDir, + readOMEFile, readNiftiHeader, generateMergedSidecarDict, + generateMergedSidecarDictWithPath, potentialLocations, getBFileContent, collectDirectorySize, diff --git a/bids-validator/utils/files/readOMEFile.js b/bids-validator/utils/files/readOMEFile.js new file mode 100644 index 000000000..25120212e --- /dev/null +++ b/bids-validator/utils/files/readOMEFile.js @@ -0,0 +1,33 @@ +import isNode from '../isNode' +import ExifReader from 'exifreader' +const xml2js = require('xml2js') + +const readOMEFile = async omeFile => { + let tags + if (isNode) { + tags = await ExifReader.load(omeFile.path) + } else { + const arrayBuffer = await toArrayBuffer(omeFile) + tags = await ExifReader.load(arrayBuffer) + } + let xml = tags['ImageDescription']['description'] + let parser = new xml2js.Parser() + return await parser.parseStringPromise(xml) +} + +const toArrayBuffer = async file => { + return new Promise((resolve, reject) => { + try { + const reader = new 
FileReader() + reader.onload = event => { + resolve(event.target.result) + } + + reader.readAsArrayBuffer(file) + } catch (e) { + reject(e) + } + }) +} + +export default readOMEFile diff --git a/bids-validator/validators/bids/checkConsistency.js b/bids-validator/validators/bids/checkConsistency.js deleted file mode 100644 index 9cda92b2b..000000000 --- a/bids-validator/validators/bids/checkConsistency.js +++ /dev/null @@ -1,223 +0,0 @@ -import utils from '../../utils' -import isNode from '../../utils/isNode' -import ExifReader from 'exifreader' -const xml2js = require('xml2js') -const Issue = require('../../utils').issues.Issue - -const getOMETiffData = async omeFile => { - let tags - if (isNode) { - tags = await ExifReader.load(omeFile.path) - } else { - const arrayBuffer = await toArrayBuffer(omeFile) - tags = await ExifReader.load(arrayBuffer) - } - let xml = tags['ImageDescription']['description'] - let parser = new xml2js.Parser() - return await parser.parseStringPromise(xml) -} - -const toArrayBuffer = async file => { - return new Promise((resolve, reject) => { - try { - const reader = new FileReader() - reader.onload = event => { - resolve(event.target.result) - } - - reader.readAsArrayBuffer(file) - } catch (e) { - reject(e) - } - }) -} - -const convertFactor = (omeUnit, jsonUnit) => { - if (omeUnit === jsonUnit || (omeUnit === 'µm' && jsonUnit === 'um')) return 1 - - if (jsonUnit === 'um') { - if (omeUnit === 'mm') { - return 1000 - } else if (omeUnit === 'nm') { - return 0.001 - } - } else if (jsonUnit === 'mm') { - if (omeUnit === 'µm') { - return 0.001 - } else if (omeUnit === 'nm') { - return 0.000001 - } - } else if (jsonUnit === 'nm') { - if (omeUnit === 'mm') { - return 1000000 - } else if (omeUnit === 'µm') { - return 1000 - } - } -} - -const getMatchingComponents = (omeFiles, jsonContentsDict) => { - let components = [] - omeFiles.forEach(omeFile => { - let possibleJsonPath = omeFile.relativePath - .replace('.tif', '') - .replace('.ome', '.json') 
- - let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) - - const mergedDictionary = utils.files.generateMergedSidecarDict( - potentialSidecars, - jsonContentsDict, - ) - - components.push({ - omeFile: omeFile, - jsonData: mergedDictionary, - }) - }) - - return components -} - -const checkConsistency = (omeFiles, jsonContentsDict) => { - let issues = [] - - let components = getMatchingComponents(omeFiles, jsonContentsDict) - - // if at least one ome-tiff file has no corresponding json file - if (components.length !== omeFiles.length) { - issues.push(new Issue({ code: 223 })) - } - - const omeIssuesPromises = components.map(component => { - return utils.limit( - () => - new Promise(async resolve => { - let jsonData = component.jsonData - let omeData = await getOMETiffData(component.omeFile) - let optionalFieldsIssues = await validateOptionalFields( - component.omeFile.relativePath, - omeData, - jsonData, - ) - let pixelSizeIssues = await validatePixelSize(omeData, jsonData) - issues = issues.concat(optionalFieldsIssues).concat(pixelSizeIssues) - return resolve() - }), - ) - }) - - return new Promise(resolve => - Promise.all(omeIssuesPromises).then(() => resolve(issues)), - ) -} - -const validateOptionalFields = async (omePath, omeData, jsonData) => { - let issues = [] - - let fields = { - Immersion: 'Immersion', - NumericalAperture: 'LensNA', - Magnification: 'NominalMagnification', - } - - if ( - omeData['OME']['Instrument'] && - omeData['OME']['Instrument'][0]['Objective'] - ) { - let objective = omeData['OME']['Instrument'][0]['Objective'][0]['$'] - for (let field in fields) { - let property = fields[field] - if (jsonData.hasOwnProperty(field) && objective[property]) { - if (objective[property] != jsonData[field]) { - issues.push( - new Issue({ - file: { - relativePath: omePath, - path: omePath, - }, - evidence: `JSON field '${field}' is inconsistent`, - code: 224, - }), - ) - } - } - } - } - - return issues -} - -const 
validateChunkTransformationMatrix = ( - jsonFilePath, - omeFilePath, - jsonData, -) => { - let issues = [] - - /*if chunk- is used either in the filenames of - ome-tiff or JSON files, 'ChunkTransformationMatrix' is recommended*/ - let regex = new RegExp('_chunk-[a-zA-Z0-9]+') - - if (regex.exec(jsonFilePath) || regex.exec(omeFilePath)) { - if (!jsonData.hasOwnProperty('ChunkTransformationMatrix')) { - issues.push(new Issue({ code: 223 })) - } - } - - return issues -} - -const validatePixelSize = async (omeData, jsonData) => { - let issues = [] - let validUnits = ['um', 'µm', 'nm', 'mm'] - - const PhysicalSizeX = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeX'] - const physicalSizeXUnit = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeXUnit'] - const PhysicalSizeY = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeY'] - const physicalSizeYUnit = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeYUnit'] - const PhysicalSizeZ = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZ'] - const physicalSizeZUnit = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZUnit'] - - // if no corresponding json file - if (Object.keys(jsonData).length === 0) issues.push(new Issue({ code: 225 })) - - let unitsPendToCheck = [ - physicalSizeXUnit, - physicalSizeYUnit, - physicalSizeZUnit, - ] - - unitsPendToCheck.forEach(unit => { - if (!validUnits.includes(unit)) { - issues.push(new Issue({ code: 222 })) - } - }) - - // if any physicalSizeUnit is not valid or no valid json file, skip the consistency check - if (issues.length > 0) return issues - - let pixelSize = jsonData['PixelSize'] - let physicalSizeUnit = jsonData['PixelSizeUnits'] - - let factorX = convertFactor(physicalSizeXUnit, physicalSizeUnit) - let factorY = convertFactor(physicalSizeYUnit, physicalSizeUnit) - let factorZ = convertFactor(physicalSizeZUnit, physicalSizeUnit) - - if ( - PhysicalSizeX * factorX !== pixelSize[0] || - PhysicalSizeY * factorY !== 
pixelSize[1] || - PhysicalSizeZ * factorZ !== pixelSize[2] - ) { - issues.push(new Issue({ code: 221 })) - } - - return issues -} -export default checkConsistency diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index e5a29fb69..430a068de 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -6,6 +6,7 @@ import json from '../json' import NIFTI from '../nifti' import bval from '../bval' import bvec from '../bvec' +import ometiff from '../ometiff' import Events from '../events' import { session } from '../session' import checkAnyDataPresent from '../checkAnyDataPresent' @@ -15,11 +16,9 @@ import groupFileTypes from './groupFileTypes' import subjects from './subjects' import checkDatasetDescription from './checkDatasetDescription' import checkReadme from './checkReadme' -import checkSamples from './checkSamples' import validateMisc from '../../utils/files/validateMisc' import collectSubjectMetadata from '../../utils/summary/collectSubjectMetadata' import collectPetFields from '../../utils/summary/collectPetFields' -import checkConsistency from './checkConsistency' /** * Full Test @@ -144,8 +143,14 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { // Check for samples file in the proper place (only for the microscopy modality) if (summary.modalities.includes('Microscopy')) { - const samplesIssues = checkSamples(fileList) - self.issues = self.issues.concat(samplesIssues) + const samplesIssues = ometiff.checkSamples(fileList) + const jsonAndFieldIssues = ometiff.checkJSONAndField( + files, + jsonContentsDict, + ) + self.issues = self.issues + .concat(samplesIssues) + .concat(jsonAndFieldIssues) } // Validate json files and contents return json.validate(jsonFiles, fileList, jsonContentsDict, summary) @@ -154,7 +159,7 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { self.issues = self.issues.concat(jsonIssues) // 
ome-tiff consistency check - return checkConsistency(files.ome, jsonContentsDict) + return ometiff.validate(files.ome, jsonContentsDict) }) .then(omeIssues => { self.issues = self.issues.concat(omeIssues) diff --git a/bids-validator/validators/bids/groupFileTypes.js b/bids-validator/validators/bids/groupFileTypes.js index eb4f4ff1b..4e981ba11 100644 --- a/bids-validator/validators/bids/groupFileTypes.js +++ b/bids-validator/validators/bids/groupFileTypes.js @@ -12,6 +12,8 @@ const groupFileTypes = (fileList, options) => { contRecord: [], invalid: [], ome: [], + png: [], + tif: [], // used to check all files not already passed through testFile() misc: [], } @@ -56,6 +58,10 @@ const sortFiles = (fileList, options, files) => { } else if (ofType(filename, 'ome.tif') || ofType(filename, 'ome.btf')) { // collect ome-tiff files.ome.push(file) + } else if (ofType(filename, 'png')) { + files.png.push(file) + } else if (ofType(filename, 'tif') && !ofType(filename, 'ome.tif')) { + files.tif.push(file) } else { files.misc.push(file) } diff --git a/bids-validator/validators/ometiff/checkJSONAndField.js b/bids-validator/validators/ometiff/checkJSONAndField.js new file mode 100644 index 000000000..e885023be --- /dev/null +++ b/bids-validator/validators/ometiff/checkJSONAndField.js @@ -0,0 +1,83 @@ +import utils from '../../utils' +const Issue = utils.issues.Issue + +const checkJSONAndField = (files, jsonContentsDict) => { + let issues = [] + if (files.ome) { + files.ome.forEach(file => { + let possibleJsonPath = file.relativePath + .replace('.tif', '') + .replace('.ome', '.json') + + issues = issues.concat( + ifJsonExist(file, possibleJsonPath, jsonContentsDict), + ) + }) + } + if (files.png) { + files.png.forEach(file => { + if (!file.relativePath.includes('_photo')) { + let possibleJsonPath = file.relativePath.replace('.png', '.json') + issues = issues.concat( + ifJsonExist(file, possibleJsonPath, jsonContentsDict), + ) + } + }) + } + if (files.tif) { + files.tif.forEach(file 
=> { + if (!file.relativePath.includes('_photo')) { + let possibleJsonPath = file.relativePath.replace('.tif', '.json') + issues = issues.concat( + ifJsonExist(file, possibleJsonPath, jsonContentsDict), + ) + } + }) + } + return issues +} + +const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { + let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) + + const mergedDictionary = utils.files.generateMergedSidecarDictWithPath( + potentialSidecars, + jsonContentsDict, + ) + + // check if the given file has a corresponding JSON file + if (Object.keys(mergedDictionary).length === 0) { + return [ + new Issue({ + file: file, + code: 225, + }), + ] + } else { + return checkMatrixField(file, mergedDictionary) + } +} + +const checkMatrixField = (file, mergedDictionary) => { + let issues = [] + let regex = new RegExp('_chunk-[a-zA-Z0-9]+') + let jsonPath = mergedDictionary.sidecarName + + // ChunkTransformationMatrix was OPTIONAL and is now RECOMMENDED if is used in filenames + if (regex.exec(file.relativePath) || regex.exec(jsonPath)) { + if (!mergedDictionary.hasOwnProperty('ChunkTransformationMatrix')) { + issues.push( + new Issue({ + file: { + path: jsonPath, + relativePath: jsonPath, + }, + code: 223, + }), + ) + } + } + return issues +} + +export default checkJSONAndField diff --git a/bids-validator/validators/ometiff/index.js b/bids-validator/validators/ometiff/index.js new file mode 100644 index 000000000..ab3895a9c --- /dev/null +++ b/bids-validator/validators/ometiff/index.js @@ -0,0 +1,11 @@ +import ometiff from './ometiff' +import validate from './validate' +import checkSamples from './checkSamples' +import checkJSONAndField from './checkJSONAndField' + +export default { + ometiff, + validate, + checkSamples, + checkJSONAndField, +} diff --git a/bids-validator/validators/ometiff/ometiff.js b/bids-validator/validators/ometiff/ometiff.js new file mode 100644 index 000000000..bb76436ce --- /dev/null +++ 
b/bids-validator/validators/ometiff/ometiff.js @@ -0,0 +1,158 @@ +import utils from '../../utils' + +const Issue = require('../../utils').issues.Issue + +/** + * ometiff + * + * Takes an ometiff file, its omedata as an object + * and a callback as arguments. Calls back + * with any issues it finds while validating + * against the BIDS specification. + */ +export default function ometiff(file, omeData, jsonContentsDict, callback) { + let issues = [] + + let mergedDictionary = getMergedDictionary(file, jsonContentsDict) + + // Check for consistency with optional OME-TIFF metadata if present for + // Immersion, NumericalAperture and Magnification + let optionalFieldsIssues = checkOptionalFields( + file.relativePath, + omeData, + mergedDictionary, + ) + + // Check for consistency for PixelSize between JSON and OME-TIFF metadata + let pixelSizeIssues = checkPixelSize(omeData, mergedDictionary) + + issues = issues.concat(optionalFieldsIssues).concat(pixelSizeIssues) + + callback(issues) +} + +const convertFactor = (omeUnit, jsonUnit) => { + if (omeUnit === jsonUnit || (omeUnit === 'µm' && jsonUnit === 'um')) return 1 + + if (jsonUnit === 'um') { + if (omeUnit === 'mm') { + return 1000 + } else if (omeUnit === 'nm') { + return 0.001 + } + } else if (jsonUnit === 'mm') { + if (omeUnit === 'µm') { + return 0.001 + } else if (omeUnit === 'nm') { + return 0.000001 + } + } else if (jsonUnit === 'nm') { + if (omeUnit === 'mm') { + return 1000000 + } else if (omeUnit === 'µm') { + return 1000 + } + } +} + +const getMergedDictionary = (file, jsonContentsDict) => { + let possibleJsonPath = file.relativePath + .replace('.tif', '') + .replace('.ome', '.json') + + let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) + + return utils.files.generateMergedSidecarDict( + potentialSidecars, + jsonContentsDict, + ) +} + +const checkOptionalFields = (omePath, omeData, jsonData) => { + let issues = [] + + let fields = { + Immersion: 'Immersion', + NumericalAperture:
'LensNA', + Magnification: 'NominalMagnification', + } + + if ( + omeData['OME']['Instrument'] && + omeData['OME']['Instrument'][0]['Objective'] + ) { + let objective = omeData['OME']['Instrument'][0]['Objective'][0]['$'] + for (let field in fields) { + let property = fields[field] + if (jsonData.hasOwnProperty(field) && objective[property]) { + if (objective[property] != jsonData[field]) { + issues.push( + new Issue({ + file: { + relativePath: omePath, + path: omePath, + }, + evidence: `JSON field '${field}' is inconsistent`, + code: 224, + }), + ) + } + } + } + } + + return issues +} + +const checkPixelSize = (omeData, jsonData) => { + let issues = [] + let validUnits = ['um', 'µm', 'nm', 'mm'] + + const PhysicalSizeX = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeX'] + const physicalSizeXUnit = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeXUnit'] + const PhysicalSizeY = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeY'] + const physicalSizeYUnit = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeYUnit'] + const PhysicalSizeZ = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZ'] + const physicalSizeZUnit = + omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZUnit'] + + // if no corresponding json file, skip the consistency check + if (Object.keys(jsonData).length === 0) return [] + + let unitsPendToCheck = [ + physicalSizeXUnit, + physicalSizeYUnit, + physicalSizeZUnit, + ] + + unitsPendToCheck.forEach(unit => { + if (!validUnits.includes(unit)) { + issues.push(new Issue({ code: 222 })) + } + }) + + // if any physicalSizeUnit is not valid or no valid json file, skip the consistency check + if (issues.length > 0) return issues + + let pixelSize = jsonData['PixelSize'] + let physicalSizeUnit = jsonData['PixelSizeUnits'] + + let factorX = convertFactor(physicalSizeXUnit, physicalSizeUnit) + let factorY = convertFactor(physicalSizeYUnit, physicalSizeUnit) + let factorZ = 
convertFactor(physicalSizeZUnit, physicalSizeUnit) + + if ( + PhysicalSizeX * factorX !== pixelSize[0] || + PhysicalSizeY * factorY !== pixelSize[1] || + PhysicalSizeZ * factorZ !== pixelSize[2] + ) { + issues.push(new Issue({ code: 221 })) + } + + return issues +} diff --git a/bids-validator/validators/ometiff/validate.js b/bids-validator/validators/ometiff/validate.js new file mode 100644 index 000000000..b727652e1 --- /dev/null +++ b/bids-validator/validators/ometiff/validate.js @@ -0,0 +1,31 @@ +import utils from '../../utils' +import ometiff from './ometiff' + +const validate = (files, jsonContentsDict) => { + let issues = [] + // validate ometiff + const omePromises = files.map(function(file) { + return utils.limit( + () => + new Promise((resolve, reject) => { + utils.files + .readOMEFile(file) + .then(omeData => { + ometiff(file, omeData, jsonContentsDict, function(omeIssues) { + issues = issues.concat(omeIssues) + resolve() + }) + }) + .catch(err => + utils.issues.redirect(err, reject, () => { + issues.push(err) + resolve() + }), + ) + }), + ) + }) + return Promise.all(omePromises).then(() => issues) +} + +export default validate From 91d52a58c855043376196075c69cb7fdd064edb6 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Thu, 23 Dec 2021 17:28:11 -0700 Subject: [PATCH 060/101] Resolve comments --- ...dSidecarWithPath.js => generateMergedSidecarDictWithPath.js} | 0 bids-validator/utils/files/index.js | 2 +- bids-validator/validators/ometiff/checkJSONAndField.js | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename bids-validator/utils/files/{generateMergedSidecarWithPath.js => generateMergedSidecarDictWithPath.js} (100%) diff --git a/bids-validator/utils/files/generateMergedSidecarWithPath.js b/bids-validator/utils/files/generateMergedSidecarDictWithPath.js similarity index 100% rename from bids-validator/utils/files/generateMergedSidecarWithPath.js rename to bids-validator/utils/files/generateMergedSidecarDictWithPath.js diff --git 
a/bids-validator/utils/files/index.js b/bids-validator/utils/files/index.js index 2e65ad4c1..faa1d26fc 100644 --- a/bids-validator/utils/files/index.js +++ b/bids-validator/utils/files/index.js @@ -14,7 +14,7 @@ import illegalCharacterTest from './illegalCharacterTest' import sessions from './sessions' import remoteFiles from './remoteFiles' import getFileStats from './getFileStats' -import generateMergedSidecarDictWithPath from './generateMergedSidecarWithPath' +import generateMergedSidecarDictWithPath from './generateMergedSidecarDictWithPath' // public API --------------------------------------------------------------------- diff --git a/bids-validator/validators/ometiff/checkJSONAndField.js b/bids-validator/validators/ometiff/checkJSONAndField.js index e885023be..f7ba9b0f7 100644 --- a/bids-validator/validators/ometiff/checkJSONAndField.js +++ b/bids-validator/validators/ometiff/checkJSONAndField.js @@ -63,7 +63,7 @@ const checkMatrixField = (file, mergedDictionary) => { let regex = new RegExp('_chunk-[a-zA-Z0-9]+') let jsonPath = mergedDictionary.sidecarName - // ChunkTransformationMatrix was OPTIONAL and is now RECOMMENDED if is used in filenames + // ChunkTransformationMatrix is RECOMMENDED if is used in filenames if (regex.exec(file.relativePath) || regex.exec(jsonPath)) { if (!mergedDictionary.hasOwnProperty('ChunkTransformationMatrix')) { issues.push( From 40d01e19d0e6f94cdfdc94d765322d984536464c Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Thu, 23 Dec 2021 17:31:40 -0700 Subject: [PATCH 061/101] Rename folder ometiff to microscopy --- bids-validator/validators/bids/fullTest.js | 2 +- .../validators/{ometiff => microscopy}/checkJSONAndField.js | 0 bids-validator/validators/{bids => microscopy}/checkSamples.js | 0 bids-validator/validators/{ometiff => microscopy}/index.js | 0 bids-validator/validators/{ometiff => microscopy}/ometiff.js | 0 bids-validator/validators/{ometiff => microscopy}/validate.js | 0 6 files changed, 1 insertion(+), 1 deletion(-) 
rename bids-validator/validators/{ometiff => microscopy}/checkJSONAndField.js (100%) rename bids-validator/validators/{bids => microscopy}/checkSamples.js (100%) rename bids-validator/validators/{ometiff => microscopy}/index.js (100%) rename bids-validator/validators/{ometiff => microscopy}/ometiff.js (100%) rename bids-validator/validators/{ometiff => microscopy}/validate.js (100%) diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 430a068de..67061b534 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -6,7 +6,7 @@ import json from '../json' import NIFTI from '../nifti' import bval from '../bval' import bvec from '../bvec' -import ometiff from '../ometiff' +import ometiff from '../microscopy' import Events from '../events' import { session } from '../session' import checkAnyDataPresent from '../checkAnyDataPresent' diff --git a/bids-validator/validators/ometiff/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js similarity index 100% rename from bids-validator/validators/ometiff/checkJSONAndField.js rename to bids-validator/validators/microscopy/checkJSONAndField.js diff --git a/bids-validator/validators/bids/checkSamples.js b/bids-validator/validators/microscopy/checkSamples.js similarity index 100% rename from bids-validator/validators/bids/checkSamples.js rename to bids-validator/validators/microscopy/checkSamples.js diff --git a/bids-validator/validators/ometiff/index.js b/bids-validator/validators/microscopy/index.js similarity index 100% rename from bids-validator/validators/ometiff/index.js rename to bids-validator/validators/microscopy/index.js diff --git a/bids-validator/validators/ometiff/ometiff.js b/bids-validator/validators/microscopy/ometiff.js similarity index 100% rename from bids-validator/validators/ometiff/ometiff.js rename to bids-validator/validators/microscopy/ometiff.js diff --git 
a/bids-validator/validators/ometiff/validate.js b/bids-validator/validators/microscopy/validate.js similarity index 100% rename from bids-validator/validators/ometiff/validate.js rename to bids-validator/validators/microscopy/validate.js From 725a7f03c9c11154bc1ff21d7792a1c464ea061c Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Mon, 27 Dec 2021 16:05:44 -0700 Subject: [PATCH 062/101] 1. Update regex rules for microscopy 2. Update microscopy schema json 3. Code optimization for tsv.js --- .../rules/file_level_rules.json | 4 +- .../rules/session_level_rules.json | 2 +- .../bids_validator/rules/top_level_rules.json | 2 +- .../validators/json/schemas/microscopy.json | 22 +++-- bids-validator/validators/tsv/tsv.js | 80 ++++++++----------- 5 files changed, 47 insertions(+), 63 deletions(-) diff --git a/bids-validator/bids_validator/rules/file_level_rules.json b/bids-validator/bids_validator/rules/file_level_rules.json index 957d6dcfe..0c36e5081 100644 --- a/bids-validator/bids_validator/rules/file_level_rules.json +++ b/bids-validator/bids_validator/rules/file_level_rules.json @@ -526,7 +526,7 @@ }, "microscopy": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?((@@@_microscopy_type_@@@)(@@@_microscopy_ext_@@@))$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", @@ -566,7 +566,7 @@ } }, "microscopy_json": { - "regexp": 
"^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(@@@_microscopy_type_@@@)\\.json$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?micr[\\/\\\\](sub-[a-zA-Z0-9]+)(?:(_ses-[a-zA-Z0-9]+))?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(@@@_microscopy_type_@@@)\\.json$", "tokens": { "@@@_microscopy_type_@@@": [ "_TEM", diff --git a/bids-validator/bids_validator/rules/session_level_rules.json b/bids-validator/bids_validator/rules/session_level_rules.json index f3cfc3779..241ef3c10 100644 --- a/bids-validator/bids_validator/rules/session_level_rules.json +++ b/bids-validator/bids_validator/rules/session_level_rules.json @@ -204,7 +204,7 @@ }, "microscopy_ses": { - "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(?:_chunk-[a-zA-Z0-9]+)?(@@@_microscopy_ses_type_@@@)$", + "regexp": "^[\\/\\\\](sub-[a-zA-Z0-9]+)[\\/\\\\](?:(ses-[a-zA-Z0-9]+)[\\/\\\\])?\\1(_\\2)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+_)?(?:_chunk-[0-9]+)?(@@@_microscopy_ses_type_@@@)$", "tokens": { "@@@_microscopy_ses_type_@@@": [ "_TEM.json", diff --git a/bids-validator/bids_validator/rules/top_level_rules.json b/bids-validator/bids_validator/rules/top_level_rules.json index cfffc6ea3..4e80dc1e6 100644 --- a/bids-validator/bids_validator/rules/top_level_rules.json +++ b/bids-validator/bids_validator/rules/top_level_rules.json @@ -136,7 +136,7 @@ }, "microscopy_top": { - "regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[a-zA-Z0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", + 
"regexp": "^[\\/\\\\](?:ses-[a-zA-Z0-9]+_)?(?:_sample-[a-zA-Z0-9]+)?(?:_acq-[a-zA-Z0-9]+)?(?:_stain-[a-zA-Z0-9]+)?(?:_run-[0-9]+)?(?:_chunk-[0-9]+)?(?:@@@_microscopy_top_ext_@@@)$", "tokens": { "@@@_microscopy_top_ext_@@@": [ "_TEM\\.json", diff --git a/bids-validator/validators/json/schemas/microscopy.json b/bids-validator/validators/json/schemas/microscopy.json index 3ac2ce639..c06203325 100644 --- a/bids-validator/validators/json/schemas/microscopy.json +++ b/bids-validator/validators/json/schemas/microscopy.json @@ -33,27 +33,25 @@ { "type": "array", "items": { "type": "string" } } ] }, - "SliceThickness": { "type": "number" }, + "SliceThickness": { "type": "number", "exclusiveMinimum": 0 }, "SampleExtractionProtocol": { "type": "string", "minLength": 1 }, "SampleExtractionInstitution": { "type": "string", "minLength": 1 }, "TissueDeformationScaling": { "type": "number" }, - "PixelSize": { "type": "array", "items": { "type": "number" } }, + "PixelSize": {"type": "array", "minItems": 2, "maxItems": 3, "items": { "type": "number", "minimum": 0 } }, "PixelSizeUnits": { "type": "string", "enum": ["mm", "um", "nm"] }, "Immersion": { "type": "string", "minLength": 1 }, - "NumericalAperture": { "type": "number" }, - "Magnification": { "type": "number" }, + "NumericalAperture": { "type": "number", "exclusiveMinimum": 0 }, + "Magnification": { "type": "number", "exclusiveMinimum": 0 }, "ImageAcquisitionProtocol": { "type": "string", "minLength": 1 }, "OtherAcquisitionParameters": { "type": "string", "minLength": 1 }, "ChunkTransformationMatrix": { - "type": "array", - "items": { "type": "array", "items": { "type": "number" } } + "anyOf": [ + {"type": "array", "minItems": 3, "maxItems": 3, "items": { "type": "array", "minItems": 3, "maxItems": 3, "items":{ "type": "number"} } }, + {"type": "array", "minItems": 4, "maxItems": 4, "items": { "type": "array", "minItems": 4, "maxItems": 4, "items":{ "type": "number"} } } + ] + }, + "ChunkTransformationMatrixAxis": { "type": 
"array", "minItems": 2, "maxItems": 3, "items": { "type": "string" }} }, - "ChunkTransformationMatrixAxis": { - "type": "array", - "items": { "type": "string" } - } - }, - "required": ["PixelSize", "PixelSizeUnits"], "recommended": [ diff --git a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index 3a53c8b53..10d36c63e 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -336,7 +336,6 @@ const TSV = (file, contents, fileList, callback) => { }), ) } - // obtain a list of the sample IDs in the samples.tsv file const sample = row[sampleIdColumn].replace('sample-', '') @@ -346,59 +345,49 @@ const TSV = (file, contents, fileList, callback) => { samples.push(sample) } - // check if a sample froma same subject is described by one and only one row - var doesSampleIdHaveDuplicates = sampleIdColumnValues.some( - (val, i) => sampleIdColumnValues.indexOf(val) !== i - ) - - for (let r = 0; r < rows.length-1; r++) { - for (let l = 0; l < rows.length-1; l++) { - if (l == r) {} - else { - if (sampleIdColumnValues[r] == sampleIdColumnValues[l]) { - if (participantIdColumnValues[r] != participantIdColumnValues[l]){ - doesSampleIdHaveDuplicates = false - } - else { - doesSampleIdHaveDuplicates = true - } - } - } - } - } - if(doesSampleIdHaveDuplicates == true) { - issues.push( - new Issue({ - file: file, - evidence: sampleIdColumnValues, - reason: 'Each sample from a same subject MUST be described by one and only one row.', - line: 1, - code: 220, - }) + // check if a sample from same subject is described by one and only one row + let samplePartIdsSet = new Set() + for (let r = 0; r < rows.length - 1; r++) { + let uniqueString = sampleIdColumnValues[r].concat( + participantIdColumnValues[r], ) + // check if SampleId Have Duplicate + if (samplePartIdsSet.has(uniqueString)) { + issues.push( + new Issue({ + file: file, + evidence: sampleIdColumnValues, + reason: + 'Each sample from a same subject MUST be described 
by one and only one row.', + line: 1, + code: 220, + }), + ) + break + } else samplePartIdsSet.add(uniqueString) } - else {} - } - // check if any incorrect patterns in sample_type column for (let c = 1; c < rows.length; c++) { const row = rows[c] - if ((row[sampleTypeColumn] != 'cell line') && - (row[sampleTypeColumn] != 'in vitro differentiated cells') && - (row[sampleTypeColumn] != 'primary cell') && - (row[sampleTypeColumn] != 'cell-free sample') && - (row[sampleTypeColumn] != 'cloning host') && - (row[sampleTypeColumn] != 'tissue') && - (row[sampleTypeColumn] != 'whole organisms') && - (row[sampleTypeColumn] != 'organoid') && - (row[sampleTypeColumn] != 'technical sample')) { - issues.push( + const validSampleTypes = [ + 'cell line', + 'in vitro differentiated cells', + 'primary cell', + 'cell-free sample', + 'cloning host', + 'tissue', + 'whole organisms', + 'organoid', + 'technical sample', + ] + if (!validSampleTypes.includes(row[sampleTypeColumn])) { + issues.push( new Issue({ file: file, evidence: row[sampleTypeColumn], - reason: 'sample_type can\'t be any value.', + reason: "sample_type can't be any value.", line: c, code: 219, }), @@ -406,9 +395,6 @@ const TSV = (file, contents, fileList, callback) => { } } } - - - if ( file.relativePath.includes('/meg/') && From 4ee01996bf32f88b25b2282fdaea96268b90416b Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Mon, 27 Dec 2021 16:07:55 -0700 Subject: [PATCH 063/101] Update regex for matrix field check --- bids-validator/validators/microscopy/checkJSONAndField.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js index f7ba9b0f7..ee0eebdfa 100644 --- a/bids-validator/validators/microscopy/checkJSONAndField.js +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -60,7 +60,7 @@ const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { const checkMatrixField = 
(file, mergedDictionary) => { let issues = [] - let regex = new RegExp('_chunk-[a-zA-Z0-9]+') + let regex = new RegExp('_chunk-[0-9]+') let jsonPath = mergedDictionary.sidecarName // ChunkTransformationMatrix is RECOMMENDED if is used in filenames From 37de6dfaf63b051f47d57b78f84005712c9be074 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Tue, 28 Dec 2021 14:15:11 -0700 Subject: [PATCH 064/101] Bug fix --- bids-validator/validators/tsv/tsv.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index 10d36c63e..72a26869d 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -369,7 +369,7 @@ const TSV = (file, contents, fileList, callback) => { } // check if any incorrect patterns in sample_type column - for (let c = 1; c < rows.length; c++) { + for (let c = 0; c < rows.length; c++) { const row = rows[c] const validSampleTypes = [ 'cell line', From 819844958cdabaa05f51064a70afb47ac8784d08 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Wed, 29 Dec 2021 13:54:16 -0700 Subject: [PATCH 065/101] Update the error message when sample type is not valid --- bids-validator/validators/tsv/tsv.js | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index 72a26869d..7d3ad2c57 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -369,26 +369,26 @@ const TSV = (file, contents, fileList, callback) => { } // check if any incorrect patterns in sample_type column - for (let c = 0; c < rows.length; c++) { + const validSampleTypes = [ + 'cell line', + 'in vitro differentiated cells', + 'primary cell', + 'cell-free sample', + 'cloning host', + 'tissue', + 'whole organisms', + 'organoid', + 'technical sample', + ] + for (let c = 1; c < rows.length; c++) { const row = rows[c] - const validSampleTypes = 
[ - 'cell line', - 'in vitro differentiated cells', - 'primary cell', - 'cell-free sample', - 'cloning host', - 'tissue', - 'whole organisms', - 'organoid', - 'technical sample', - ] if (!validSampleTypes.includes(row[sampleTypeColumn])) { issues.push( new Issue({ file: file, evidence: row[sampleTypeColumn], reason: "sample_type can't be any value.", - line: c, + line: c + 1, code: 219, }), ) From 5933078fdad94c777daa7dd0ad539e09e43da5b9 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Sun, 2 Jan 2022 16:33:22 -0700 Subject: [PATCH 066/101] 1. Add validation for tiff and big tiff 2. Add validation for inconsistent tiff type and extension --- bids-validator/utils/files/index.js | 2 + bids-validator/utils/files/readBuffer.js | 23 +++++++ bids-validator/utils/files/readOMEFile.js | 32 +++------- bids-validator/utils/issues/list.js | 11 ++++ .../validators/bids/groupFileTypes.js | 6 +- .../microscopy/checkJSONAndField.js | 1 + .../validators/microscopy/ometiff.js | 3 +- .../validators/microscopy/validate.js | 63 ++++++++++++++----- .../microscopy/validateTiffSignature.js | 20 ++++++ 9 files changed, 120 insertions(+), 41 deletions(-) create mode 100644 bids-validator/utils/files/readBuffer.js create mode 100644 bids-validator/validators/microscopy/validateTiffSignature.js diff --git a/bids-validator/utils/files/index.js b/bids-validator/utils/files/index.js index faa1d26fc..b93324da6 100644 --- a/bids-validator/utils/files/index.js +++ b/bids-validator/utils/files/index.js @@ -4,6 +4,7 @@ import FileAPI from './FileAPI' import newFile from './newFile' import readFile from './readFile' import readOMEFile from './readOMEFile' +import readBuffer from './readBuffer' import readNiftiHeader from './readNiftiHeader' import readDir from './readDir' import potentialLocations from './potentialLocations' @@ -23,6 +24,7 @@ export default { newFile, readFile, readDir, + readBuffer, readOMEFile, readNiftiHeader, generateMergedSidecarDict, diff --git 
a/bids-validator/utils/files/readBuffer.js b/bids-validator/utils/files/readBuffer.js new file mode 100644 index 000000000..732a50ae2 --- /dev/null +++ b/bids-validator/utils/files/readBuffer.js @@ -0,0 +1,23 @@ +import isNode from '../isNode' +import fs from 'fs' + +const readBuffer = file => { + return new Promise((resolve, reject) => { + if (isNode) { + resolve(fs.readFileSync(file.path)) + } else { + try { + const reader = new FileReader() + reader.onload = event => { + resolve(event.target.result) + } + + reader.readAsArrayBuffer(file) + } catch (e) { + reject(e) + } + } + }) +} + +export default readBuffer diff --git a/bids-validator/utils/files/readOMEFile.js b/bids-validator/utils/files/readOMEFile.js index 25120212e..bdbc2ac56 100644 --- a/bids-validator/utils/files/readOMEFile.js +++ b/bids-validator/utils/files/readOMEFile.js @@ -1,32 +1,16 @@ -import isNode from '../isNode' import ExifReader from 'exifreader' const xml2js = require('xml2js') -const readOMEFile = async omeFile => { - let tags - if (isNode) { - tags = await ExifReader.load(omeFile.path) - } else { - const arrayBuffer = await toArrayBuffer(omeFile) - tags = await ExifReader.load(arrayBuffer) - } +const readOMEFile = buffer => { + let tags = ExifReader.load(buffer) let xml = tags['ImageDescription']['description'] - let parser = new xml2js.Parser() - return await parser.parseStringPromise(xml) -} - -const toArrayBuffer = async file => { return new Promise((resolve, reject) => { - try { - const reader = new FileReader() - reader.onload = event => { - resolve(event.target.result) - } - - reader.readAsArrayBuffer(file) - } catch (e) { - reject(e) - } + xml2js + .parseStringPromise(xml) + .then(result => { + resolve(result) + }) + .catch(error => reject(error)) }) } diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index c687ddf65..511383e76 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1108,4 +1108,15 @@ 
export default { severity: 'error', reason: 'No valid JSON file found for this file', }, + 226: { + key: 'UNSUPPORTED_BIG_TIFF', + severity: 'error', + reason: + 'BigTiff OME-TIFF file is not supported for consistency check right now, please use Tiff OME-TIFF file instead', + }, + 227: { + key: 'INCONSISTENT_TIFF_EXTENSION', + severity: 'error', + reason: 'Inconsistent TIFF file type and extension', + }, } diff --git a/bids-validator/validators/bids/groupFileTypes.js b/bids-validator/validators/bids/groupFileTypes.js index 4e981ba11..b76eca201 100644 --- a/bids-validator/validators/bids/groupFileTypes.js +++ b/bids-validator/validators/bids/groupFileTypes.js @@ -60,7 +60,11 @@ const sortFiles = (fileList, options, files) => { files.ome.push(file) } else if (ofType(filename, 'png')) { files.png.push(file) - } else if (ofType(filename, 'tif') && !ofType(filename, 'ome.tif')) { + } else if ( + ofType(filename, 'tif') && + !ofType(filename, 'ome.tif') && + !ofType(filename, 'ome.btf') + ) { files.tif.push(file) } else { files.misc.push(file) diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js index ee0eebdfa..be338a256 100644 --- a/bids-validator/validators/microscopy/checkJSONAndField.js +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -7,6 +7,7 @@ const checkJSONAndField = (files, jsonContentsDict) => { files.ome.forEach(file => { let possibleJsonPath = file.relativePath .replace('.tif', '') + .replace('.btf', '') .replace('.ome', '.json') issues = issues.concat( diff --git a/bids-validator/validators/microscopy/ometiff.js b/bids-validator/validators/microscopy/ometiff.js index bb76436ce..5fa434cbc 100644 --- a/bids-validator/validators/microscopy/ometiff.js +++ b/bids-validator/validators/microscopy/ometiff.js @@ -1,6 +1,5 @@ import utils from '../../utils' - -const Issue = require('../../utils').issues.Issue +const Issue = utils.issues.Issue /** * ometiff diff --git 
a/bids-validator/validators/microscopy/validate.js b/bids-validator/validators/microscopy/validate.js index b727652e1..650ed0996 100644 --- a/bids-validator/validators/microscopy/validate.js +++ b/bids-validator/validators/microscopy/validate.js @@ -1,5 +1,10 @@ import utils from '../../utils' +const Issue = utils.issues.Issue import ometiff from './ometiff' +import validateTiffSignature from './validateTiffSignature' + +const TIFF_ID = 0x2a +const BIG_TIFF_ID = 0x2b const validate = (files, jsonContentsDict) => { let issues = [] @@ -8,20 +13,50 @@ const validate = (files, jsonContentsDict) => { return utils.limit( () => new Promise((resolve, reject) => { - utils.files - .readOMEFile(file) - .then(omeData => { - ometiff(file, omeData, jsonContentsDict, function(omeIssues) { - issues = issues.concat(omeIssues) - resolve() - }) - }) - .catch(err => - utils.issues.redirect(err, reject, () => { - issues.push(err) - resolve() - }), - ) + utils.files.readBuffer(file).then(buffer => { + if (validateTiffSignature(buffer, TIFF_ID)) { + if (file.relativePath.endsWith('.ome.btf')) { + issues.push( + new Issue({ + code: 227, + file: file, + evidence: `Inconsistent TIFF file type and extension, given .ome.btf but should be .ome.tif`, + }), + ) + } + utils.files + .readOMEFile(buffer) + .then(omeData => { + ometiff(file, omeData, jsonContentsDict, function(omeIssues) { + issues = issues.concat(omeIssues) + resolve() + }) + }) + .catch(err => + utils.issues.redirect(err, reject, () => { + issues.push(err) + resolve() + }), + ) + } else if (validateTiffSignature(buffer, BIG_TIFF_ID)) { + if (file.relativePath.endsWith('.ome.tif')) { + issues.push( + new Issue({ + code: 227, + file: file, + evidence: `Inconsistent TIFF file type and extension, given .ome.tif but should be .ome.btf`, + }), + ) + } + issues.push( + new Issue({ + code: 226, + file: file, + }), + ) + resolve() + } + }) }), ) }) diff --git a/bids-validator/validators/microscopy/validateTiffSignature.js 
b/bids-validator/validators/microscopy/validateTiffSignature.js new file mode 100644 index 000000000..5ac7265b3 --- /dev/null +++ b/bids-validator/validators/microscopy/validateTiffSignature.js @@ -0,0 +1,20 @@ +import isNode from '../../utils/isNode' + +const getDataView = buffer => { + if (isNode) { + const uint8arr = new Uint8Array(buffer.byteLength) + buffer.copy(uint8arr, 0, 0, buffer.byteLength) + return new DataView(uint8arr.buffer) + } else { + return new DataView(buffer) + } +} + +const validateTiffSignature = (buffer, tiffId) => { + const dataView = getDataView(buffer) + const littleEndian = dataView.getUint16(0) === 0x4949 + + return dataView.getUint16(2, littleEndian) === tiffId +} + +export default validateTiffSignature From 2ce47e23483aceb676ad73de9b8b1ceb09d20547 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Mon, 3 Jan 2022 14:35:27 -0700 Subject: [PATCH 067/101] Change an error to warning --- bids-validator/utils/issues/list.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 511383e76..1c17b2dc3 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -1110,9 +1110,8 @@ export default { }, 226: { key: 'UNSUPPORTED_BIG_TIFF', - severity: 'error', - reason: - 'BigTiff OME-TIFF file is not supported for consistency check right now, please use Tiff OME-TIFF file instead', + severity: 'warning', + reason: 'Metadata consistency check skipped for BigTiff OME-TIFF file', }, 227: { key: 'INCONSISTENT_TIFF_EXTENSION', From b7d18dc2e875c89c4ece9ed3b0d97fdc203c8237 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 6 Jan 2022 12:22:34 -0600 Subject: [PATCH 068/101] add tests for checkJSONAndField --- .../__tests__/checkJSONAndField.spec.js | 51 +++++++++++++++++++ .../microscopy/checkJSONAndField.js | 40 ++++++++------- 2 files changed, 73 insertions(+), 18 deletions(-) create mode 100644 
bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js diff --git a/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js b/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js new file mode 100644 index 000000000..62fa586b5 --- /dev/null +++ b/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js @@ -0,0 +1,51 @@ +import { assert } from 'chai' +import checkJSONAndField from '../checkJSONAndField' + +describe('checkJSONAndField()', () => { + const emptyJsonContentsDict = { + 'test.json': {}, + } + it('returns no issues with empty arguments', () => { + const issues = checkJSONAndField({}, {}) + expect(issues.length).toBe(0) + }) + + it('returns issue 225 with no json for ome files', () => { + const files = { + ome: [{ relativePath: 'test.ome.tif' }], + } + const issues = checkJSONAndField(files, emptyJsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(225) + }) + + it('returns issue 225 with no json for tif files', () => { + const files = { + tif: [{ relativePath: 'test.tif' }], + } + const issues = checkJSONAndField(files, emptyJsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(225) + }) + + it('returns issue 225 with no json for png files', () => { + const files = { + png: [{ relativePath: 'test.png' }], + } + const issues = checkJSONAndField(files, emptyJsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(225) + }) + + it('returns warning 223 if chunk entity present but missing metadata', () => { + const files = { + ome: [{ relativePath: '/test_chunk-01.ome.tif' }], + } + const jsonContentsDict = { + '/test_chunk-01.json': { testKey: 'testValue' }, + } + const issues = checkJSONAndField(files, jsonContentsDict) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(223) + }) +}) diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js 
b/bids-validator/validators/microscopy/checkJSONAndField.js index be338a256..ea4ee1526 100644 --- a/bids-validator/validators/microscopy/checkJSONAndField.js +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -9,7 +9,6 @@ const checkJSONAndField = (files, jsonContentsDict) => { .replace('.tif', '') .replace('.btf', '') .replace('.ome', '.json') - issues = issues.concat( ifJsonExist(file, possibleJsonPath, jsonContentsDict), ) @@ -40,11 +39,20 @@ const checkJSONAndField = (files, jsonContentsDict) => { const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) + console.log(potentialSidecars) + const chunkRegex = new RegExp('_chunk-[0-9]+') + + const jsonChunkFiles = potentialSidecars.filter( + name => jsonContentsDict.hasOwnProperty(name) && chunkRegex.exec(name), + ) + const chunkPresent = + jsonChunkFiles.length || chunkRegex.exec(file.relativePath) const mergedDictionary = utils.files.generateMergedSidecarDictWithPath( potentialSidecars, jsonContentsDict, ) + //console.log(mergedDictionary) // check if the given file has a corresponding JSON file if (Object.keys(mergedDictionary).length === 0) { @@ -54,30 +62,26 @@ const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { code: 225, }), ] - } else { + } + + if (chunkPresent) { return checkMatrixField(file, mergedDictionary) } } const checkMatrixField = (file, mergedDictionary) => { let issues = [] - let regex = new RegExp('_chunk-[0-9]+') - let jsonPath = mergedDictionary.sidecarName - - // ChunkTransformationMatrix is RECOMMENDED if is used in filenames - if (regex.exec(file.relativePath) || regex.exec(jsonPath)) { - if (!mergedDictionary.hasOwnProperty('ChunkTransformationMatrix')) { - issues.push( - new Issue({ - file: { - path: jsonPath, - relativePath: jsonPath, - }, - code: 223, - }), - ) - } + if (!mergedDictionary.hasOwnProperty('ChunkTransformationMatrix')) { + issues.push( + new Issue({ + file: { 
+ path: file, + }, + code: 223, + }), + ) } + // } return issues } From c9712e11461b5edc7cad585a7061f1eafb5601bb Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 6 Jan 2022 12:44:33 -0600 Subject: [PATCH 069/101] add tests for checkSample.js --- .../microscopy/__tests__/checkSample.spec.js | 19 +++++++++++++++++++ .../microscopy/checkJSONAndField.js | 2 -- 2 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 bids-validator/validators/microscopy/__tests__/checkSample.spec.js diff --git a/bids-validator/validators/microscopy/__tests__/checkSample.spec.js b/bids-validator/validators/microscopy/__tests__/checkSample.spec.js new file mode 100644 index 000000000..5dd541c27 --- /dev/null +++ b/bids-validator/validators/microscopy/__tests__/checkSample.spec.js @@ -0,0 +1,19 @@ +import checkSamples from '../checkSamples' +describe('checkSamples()', () => { + it('returns issue 214 when no samples.tsv is present', () => { + const fileList = { + '0': { relativePath: '/test.tsv' }, + } + const issues = checkSamples(fileList) + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(214) + }) + + it('doesnt return issue 214 when samples.tsv is present', () => { + const fileList = { + '0': { relativePath: '/samples.tsv' }, + } + const issues = checkSamples(fileList) + expect(issues.length).toBe(0) + }) +}) diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js index ea4ee1526..6047d7b66 100644 --- a/bids-validator/validators/microscopy/checkJSONAndField.js +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -39,7 +39,6 @@ const checkJSONAndField = (files, jsonContentsDict) => { const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { let potentialSidecars = utils.files.potentialLocations(possibleJsonPath) - console.log(potentialSidecars) const chunkRegex = new RegExp('_chunk-[0-9]+') const jsonChunkFiles = potentialSidecars.filter( @@ -52,7 +51,6 @@ const 
ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { potentialSidecars, jsonContentsDict, ) - //console.log(mergedDictionary) // check if the given file has a corresponding JSON file if (Object.keys(mergedDictionary).length === 0) { From 4fc3c1a95f06c199e884f88c55f0bfe036e34c92 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 6 Jan 2022 15:57:06 -0600 Subject: [PATCH 070/101] add tests for microscopy validate.js and test data --- .../microscopy/__tests__/data/btif_id.ome.tif | Bin 0 -> 2668 bytes .../__tests__/data/invalid_id.ome.tif | Bin 0 -> 2668 bytes .../microscopy/__tests__/data/tif_id.ome.btf | Bin 0 -> 2668 bytes .../microscopy/__tests__/data/tif_id.ome.tif | Bin 0 -> 2668 bytes .../microscopy/__tests__/data/valid.ome.tif | Bin 0 -> 2668 bytes .../microscopy/__tests__/validate.spec.js | 99 ++++++++++++++++++ 6 files changed, 99 insertions(+) create mode 100644 bids-validator/validators/microscopy/__tests__/data/btif_id.ome.tif create mode 100644 bids-validator/validators/microscopy/__tests__/data/invalid_id.ome.tif create mode 100644 bids-validator/validators/microscopy/__tests__/data/tif_id.ome.btf create mode 100644 bids-validator/validators/microscopy/__tests__/data/tif_id.ome.tif create mode 100644 bids-validator/validators/microscopy/__tests__/data/valid.ome.tif create mode 100644 bids-validator/validators/microscopy/__tests__/validate.spec.js diff --git a/bids-validator/validators/microscopy/__tests__/data/btif_id.ome.tif b/bids-validator/validators/microscopy/__tests__/data/btif_id.ome.tif new file mode 100644 index 0000000000000000000000000000000000000000..f62946161b9a6268d05460c73db5d9d930d0a7da GIT binary patch literal 2668 zcmeHH-EJF26dornbpnl&7PughI4o)@O7_QTQX*^ZsJ3ItC~+di5ox%nCc9&M=*W~^D z0$Cur+!`7EO1?lq=AZoP+Y&PRgXCkqee%=mdF7kcO$Kf<@IT7H52H8t2+1vy_>xH8 z3|<)HJnk)y@g2nPWpq|@3*;}ni+;RK$i0mGqYVBecRS(ULY?(7-a)*P(Rnn+i^yLY zW4zZ$HG^xpTM19>&%qei@$AzK&$qcdq=EZ3^2g6IzHwZWFW#Adk}ApLvy@+2US7Jp 
z^1;g8`)jMKYxg%k{1|^5o9myh3jKY3|mVm2#!|{Os&`|7HKxtKUX% zh`m4XJ-A|#3`E2`JU%6L zinT?=z!ew2i0oS>(FPHAh}{i=qeQ!O|#$c8zN+Z z&z(p}M}$K|L>)8MF=a1|eK&>AbYHm4OOWF2wrPN_mu;j4OnM+o+P~ACI$h>dX`+F- zZ55vA))OtYEDj`}v+h4m9N4z3f{ArVEp<#e=FnjpR3k=}h)P;GyyBrm8p(??;Xf2k z689Y6HNjou&3~lh8jCm=#z4AS**+MAEQ+))(BF z$gs_VF?>!*&2qdr1f~9_ONZtG~l=fao+KZ!&4@I17noXbN~PV literal 0 HcmV?d00001 diff --git a/bids-validator/validators/microscopy/__tests__/data/invalid_id.ome.tif b/bids-validator/validators/microscopy/__tests__/data/invalid_id.ome.tif new file mode 100644 index 0000000000000000000000000000000000000000..96973db510a36155af7258f39c11af9cb34b6bb6 GIT binary patch literal 2668 zcmeHH-EJF26dornbpnl&7PughI4o*u)$EVcq(s)*QEkVPQQ}03qo(1an(U74p}RZV zopEe)!yQjiiKpPcLc9QX6z+Kqb9UE1>o&jxkhNF)&6#t)Ilt3rj7akaA*0_%Z^(!F z1+qYLxivESm3)PO%s=_ncO_)>2g%2Jd*r7#^UBw&>kM3H;D3~XA4YHQ5RzLY@gl7m>d* z#(1xhY6jPGHxi!MpZzhe@MnW8RL6fQjByxfF-mpfh54HV@88bv*aQx9B@w(*IGLmE*uLl2P11);hu z0`7+*QWW_oj0PNgj7kQ~H$=z+ zpF5F|jtGZ_h&pDhW6E9_`)&%K>ArB8mmtO4ZPNf(nJGu z%PKt4ttVP)SsX|{XWf6CIIv||1rzI#TI!f`%%Q_Hs78z`5tX!Xc*#SFG?Eu(!hax~ zBR&6Y(AcWX>K5f9^nl)vdotS`7T zkzt!z1>2lim+b~d9rdQfvt>ejI4K!3u@o2IF-P%Bmbfrop&~S}y~jgw#61lTuwk1| z;|bZ;peg*gs}5-=;B7w1-?oL*gRJK4Nc6_1os>R7i4RYmS3Qo+g2C0OVr4AuW8Es~ zg`HO6>!P()+_r4?Bnv-WDgIdjf8=XV;7FG=$TA*0_%ugUxQ z1+qYLxivESl{`Q|=AZoP+Y&PRgXCkqJ@V7*dFAWXbq208@D63*htZomgya@Ud`Tp4 z1}}_p9`_c<_!i>#GCC`{1@af(ML*so%QWSHF$k z5PNUndvM7j$wg4o3WlWt3moC{pi|OLTXlU`+bi3j={kHvqX4BvP+cy;5x&m?1qf*X zM~D0R+2H~BOi`CA3KyPmUTi|Y%bhOj28!|kjiR2zsRu4c+xSGpA&n@Sp$AChf>2!+ z0rx`@DT@3PMgxvLMkNF0as>|UDK2`_8^S~02C9I1-WQt*!v%F-Krci$8Hk8?czjCg z6l;r!fh#Z!kivkvh`^6UJir~FB4@A(lCi`%4yx6t*P+iYKyt;3&{Zldnr6S>H$=z+ zpF5F|jtGZ_h&pDhW6E9_`)&%K>ArB8mmtO4ZPNf(nJGu z%PKt4ttVP)SsX|{XWf6CIIv||1rzI#TI!f`%%Q_Hs78z`5tX!Xc*#SFG?Eu(!hax~ zBR&6Y(AcWX>K5f9^nl)vsttS`7T zkzt!z1>2lim+b~d9rdQfvt>ejI4K!3u@o2IF-P%Bmbfrop&~S}y~jgw#61lTuwk1| z;|bZ;peg*gs}5-=;B7w1-?oL*gRJK4Nc6_1os>R7i4RYmmpzWng2C0OVr4AuW8Es~ zg`HO6tD?14+_r4?Bnv-WDgIdjf8=XV;7FG=$TA*0_%ugUxQ 
z1+qYLxivESl{`Q|=AZoP+Y&PRgXCkqJ@V7*dFAWXbq208@D63*htZomgya@Ud`Tp4 z1}}_p9`_c<_!i>#GCC`{1@af(ML*so%QWSHF$k z5PNUndvM7j$wg4o3WlWt3moC{pi|OLTXlU`+bi3j={kHvqX4BvP+cy;5x&m?1qf*X zM~D0R+2H~BOi`CA3KyPmUTi|Y%bhOj28!|kjiR2zsRu4c+xSGpA&n@Sp$AChf>2!+ z0rx`@DT@3PMgxvLMkNF0as>|UDK2`_8^S~02C9I1-WQt*!v%F-Krci$8Hk8?czjCg z6l;r!fh#Z!kivkvh`^6UJir~FB4@A(lCi`%4yx6t*P+iYKyt;3&{Zldnr6S>H$=z+ zpF5F|jtGZ_h&pDhW6E9_`)&%K>ArB8mmtO4ZPNf(nJGu z%PKt4ttVP)SsX|{XWf6CIIv||1rzI#TI!f`%%Q_Hs78z`5tX!Xc*#SFG?Eu(!hax~ zBR&6Y(AcWX>K5f9^nl)vsttS`7T zkzt!z1>2lim+b~d9rdQfvt>ejI4K!3u@o2IF-P%Bmbfrop&~S}y~jgw#61lTuwk1| z;|bZ;peg*gs}5-=;B7w1-?oL*gRJK4Nc6_1os>R7i4RYmmpzWng2C0OVr4AuW8Es~ zg`HO6tD?14+_r4?Bnv-WDgIdjf8=XV;7FG=$TA*0_%ugUxQ z1+qYLxivESl{`Q|=AZoP+Y&PRgXCkqJ@V7*dFAWXbq208@D63*htZomgya@Ud`Tp4 z1}}_p9`_c<_!i>#GCC`{1@af(ML*so%QWSHF$k z5PNUndvM7j$wg4o3WlWt3moC{pi|OLTXlU`+bi3j={kHvqX4BvP+cy;5x&m?1qf*X zM~D0R+2H~BOi`CA3KyPmUTi|Y%bhOj28!|kjiR2zsRu4c+xSGpA&n@Sp$AChf>2!+ z0rx`@DT@3PMgxvLMkNF0as>|UDK2`_8^S~02C9I1-WQt*!v%F-Krci$8Hk8?czjCg z6l;r!fh#Z!kivkvh`^6UJir~FB4@A(lCi`%4yx6t*P+iYKyt;3&{Zldnr6S>H$=z+ zpF5F|jtGZ_h&pDhW6E9_`)&%K>ArB8mmtO4ZPNf(nJGu z%PKt4ttVP)SsX|{XWf6CIIv||1rzI#TI!f`%%Q_Hs78z`5tX!Xc*#SFG?Eu(!hax~ zBR&6Y(AcWX>K5f9^nl)vsttS`7T zkzt!z1>2lim+b~d9rdQfvt>ejI4K!3u@o2IF-P%Bmbfrop&~S}y~jgw#61lTuwk1| z;|bZ;peg*gs}5-=;B7w1-?oL*gRJK4Nc6_1os>R7i4RYmmpzWng2C0OVr4AuW8Es~ zg`HO6tD?14+_r4 { + it('returns error 227 with extension/id mismatch', () => { + const fileName = 'btif_id.ome.tif' + const files = [ + { + name: fileName, + relativePath: `/bids-validator/validators/microscopy/__tests__/data/${fileName}`, + path: path.join(dataDir, fileName), + }, + ] + + expect.assertions(3) + return validate(files, {}).then(issues => { + expect(issues.length).toBe(2) + expect(issues[0].code).toBe(227) + expect(issues[1].code).toBe(226) + }) + }) + + it('returns error 227 with incorrect id in magic number', () => { + const fileName = 'invalid_id.ome.tif' + const files = [ + { + name: fileName, + relativePath: 
`/bids-validator/validators/microscopy/__tests__/data/${fileName}`, + path: path.join(dataDir, fileName), + }, + ] + expect.assertions(2) + return validate(files, {}).then(issues => { + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(227) + }) + }) + + it('returns error 227 with tif id and btf extension', () => { + const fileName = 'tif_id.ome.btf' + const files = [ + { + name: fileName, + relativePath: `/bids-validator/validators/microscopy/__tests__/data/${fileName}`, + path: path.join(dataDir, fileName), + }, + ] + + expect.assertions(2) + return validate(files, {}).then(issues => { + expect(issues.length).toBe(1) + expect(issues[0].code).toBe(227) + }) + }) + + it('validates with valid data', () => { + const fileName = 'valid.ome.tif' + const relativePath = `/bids-validator/validators/microscopy/__tests__/data/${fileName}` + const files = [ + { + name: fileName, + relativePath: relativePath, + path: path.join(dataDir, fileName), + }, + ] + const jsonContentDict = {} + jsonContentDict[relativePath.replace('.ome.tif', '.json')] = jsonContent + + expect.assertions(1) + return validate(files, jsonContentDict).then(issues => { + expect(issues.length).toBe(0) + }) + }) +}) From 9ce48d7c424bc2d8da91a9f67609e1dc5bf0ccd0 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 6 Jan 2022 15:57:56 -0600 Subject: [PATCH 071/101] add param docs to readFile function --- bids-validator/utils/files/readFile.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bids-validator/utils/files/readFile.js b/bids-validator/utils/files/readFile.js index 823299b21..4f420d6c7 100644 --- a/bids-validator/utils/files/readFile.js +++ b/bids-validator/utils/files/readFile.js @@ -25,7 +25,11 @@ const checkEncoding = (file, data, cb) => { } /** - * Read + * readFile + * @param {object | File} file - nodeJS fs file or browser File + * @param {boolean} annexed - is the file currently annexed? + * @param {string} dir - path to directory containing dataset. 
Only used if + * annexed is true. * * A helper method for reading file contents. * Takes a file object and a callback and calls From 87ec79ed7023a7a5e176d36d9275d47fb2513400 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 6 Jan 2022 15:58:30 -0600 Subject: [PATCH 072/101] generate error if magic number is incorrect on tiff file --- bids-validator/validators/microscopy/validate.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/bids-validator/validators/microscopy/validate.js b/bids-validator/validators/microscopy/validate.js index 650ed0996..c4d90e97e 100644 --- a/bids-validator/validators/microscopy/validate.js +++ b/bids-validator/validators/microscopy/validate.js @@ -55,6 +55,15 @@ const validate = (files, jsonContentsDict) => { }), ) resolve() + } else { + issues.push( + new Issue({ + code: 227, + file: file, + evidence: `3rd byte of file does not identify file as tiff.`, + }), + ) + resolve() } }) }), From 3a88e3e27cf3c83b6a7880ebbf9f11a85247d7ff Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 11:25:09 -0600 Subject: [PATCH 073/101] move new dependencies into validator package.json --- bids-validator/package.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bids-validator/package.json b/bids-validator/package.json index a01350336..054da6d47 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -58,7 +58,9 @@ "stream-browserify": "^3.0.0", "table": "^5.2.3", "yaml": "^1.10.2", - "yargs": "^16.2.0" + "yargs": "^16.2.0", + "exifreader": "^4.1.0", + "xml2js": "^0.4.23" }, "devDependencies": { "adm-zip": "", From 6f906e9cc4cce5ac468c154b830517e62c7ed2ff Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 11:27:26 -0600 Subject: [PATCH 074/101] have micro json check use old sidecar generation function --- .../generateMergedSidecarDictWithPath.js | 26 ------------------- .../microscopy/checkJSONAndField.js | 2 +- 2 files changed, 1 insertion(+), 27 deletions(-) delete mode 
100644 bids-validator/utils/files/generateMergedSidecarDictWithPath.js diff --git a/bids-validator/utils/files/generateMergedSidecarDictWithPath.js b/bids-validator/utils/files/generateMergedSidecarDictWithPath.js deleted file mode 100644 index 78e9fbe01..000000000 --- a/bids-validator/utils/files/generateMergedSidecarDictWithPath.js +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Generate Merged Sidecar Dictionary - * - * Takes an array of potential sidecards and a - * master object dictionary of all JSON file - * content and returns a merged dictionary - * containing all values from the potential - * sidecars and the sidecarName - */ -function generateMergedSidecarDictWithPath(potentialSidecars, jsonContents) { - let mergedDictionary = {} - potentialSidecars.map(sidecarName => { - const jsonObject = jsonContents[sidecarName] - if (jsonObject) { - mergedDictionary['sidecarName'] = sidecarName - for (var key in jsonObject) { - mergedDictionary[key] = jsonObject[key] - } - } else if (jsonObject === null) { - mergedDictionary.invalid = true - } - }) - return mergedDictionary -} - -export default generateMergedSidecarDictWithPath diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js index 6047d7b66..a77e6a8b4 100644 --- a/bids-validator/validators/microscopy/checkJSONAndField.js +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -47,7 +47,7 @@ const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { const chunkPresent = jsonChunkFiles.length || chunkRegex.exec(file.relativePath) - const mergedDictionary = utils.files.generateMergedSidecarDictWithPath( + const mergedDictionary = utils.files.generateMergedSidecarDict( potentialSidecars, jsonContentsDict, ) From 8c9e1e1756c03f15dd86af1e72a03f1f75e3af35 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Wed, 5 Jan 2022 22:01:33 -0700 Subject: [PATCH 075/101] Update validation for OME namespace --- 
.../validators/microscopy/ometiff.js | 48 ++++++++++++++----- 1 file changed, 36 insertions(+), 12 deletions(-) diff --git a/bids-validator/validators/microscopy/ometiff.js b/bids-validator/validators/microscopy/ometiff.js index 5fa434cbc..6f8bf8a5b 100644 --- a/bids-validator/validators/microscopy/ometiff.js +++ b/bids-validator/validators/microscopy/ometiff.js @@ -14,16 +14,23 @@ export default function ometiff(file, omeData, jsonContentsDict, callback) { let mergedDictionary = getMergedDictionary(file, jsonContentsDict) + let rootKey = Object.keys(omeData)[0] + let namespace = '' + if (rootKey.includes(':OME')) { + namespace = rootKey.split(':OME')[0].concat(':') + } + // Check for consistency with optional OME-TIFF metadata if present for // Immersion, NumericalAperture and Magnification let optionalFieldsIssues = checkOptionalFields( file.relativePath, omeData, + namespace, mergedDictionary, ) // Check for consistency for PixelSize between JSON and OME-TIFF metadata - let pixelSizeIssues = checkPixelSize(omeData, mergedDictionary) + let pixelSizeIssues = checkPixelSize(omeData, namespace, mergedDictionary) issues = issues.concat(optionalFieldsIssues).concat(pixelSizeIssues) @@ -67,7 +74,7 @@ const getMergedDictionary = (file, jsonContentsDict) => { ) } -const checkOptionalFields = (omePath, omeData, jsonData) => { +const checkOptionalFields = (omePath, omeData, namespace, jsonData) => { let issues = [] let fields = { @@ -77,10 +84,15 @@ const checkOptionalFields = (omePath, omeData, jsonData) => { } if ( - omeData['OME']['Instrument'] && - omeData['OME']['Instrument'][0]['Objective'] + omeData[`${namespace}OME`][`${namespace}Instrument`] && + omeData[`${namespace}OME`][`${namespace}Instrument`][0][ + `${namespace}Objective` + ] ) { - let objective = omeData['OME']['Instrument'][0]['Objective'][0]['$'] + let objective = + omeData[`${namespace}OME`][`${namespace}Instrument`][0][ + `${namespace}Objective` + ][0]['$'] for (let field in fields) { let property = 
fields[field] if (jsonData.hasOwnProperty(field) && objective[property]) { @@ -103,22 +115,34 @@ const checkOptionalFields = (omePath, omeData, jsonData) => { return issues } -const checkPixelSize = (omeData, jsonData) => { +const checkPixelSize = (omeData, namespace, jsonData) => { let issues = [] let validUnits = ['um', 'µm', 'nm', 'mm'] const PhysicalSizeX = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeX'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeX'] const physicalSizeXUnit = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeXUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeXUnit'] const PhysicalSizeY = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeY'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeY'] const physicalSizeYUnit = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeYUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeYUnit'] const PhysicalSizeZ = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZ'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeZ'] const physicalSizeZUnit = - omeData['OME']['Image'][0]['Pixels'][0]['$']['PhysicalSizeZUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeZUnit'] // if no corresponding json file, skip the consistency check if (Object.keys(jsonData).length === 0) return [] From af3c7597004671f24d721cbbd1314128fa15c702 Mon Sep 17 00:00:00 2001 From: Yuan Wang Date: Thu, 6 Jan 2022 18:03:39 -0700 Subject: [PATCH 076/101] Fix code style --- .../validators/microscopy/ometiff.js | 32 +++++-------------- 1 file changed, 8 insertions(+), 24 deletions(-) diff --git a/bids-validator/validators/microscopy/ometiff.js 
b/bids-validator/validators/microscopy/ometiff.js index 6f8bf8a5b..6e6030375 100644 --- a/bids-validator/validators/microscopy/ometiff.js +++ b/bids-validator/validators/microscopy/ometiff.js @@ -85,14 +85,10 @@ const checkOptionalFields = (omePath, omeData, namespace, jsonData) => { if ( omeData[`${namespace}OME`][`${namespace}Instrument`] && - omeData[`${namespace}OME`][`${namespace}Instrument`][0][ - `${namespace}Objective` - ] + omeData[`${namespace}OME`][`${namespace}Instrument`][0][`${namespace}Objective`] ) { let objective = - omeData[`${namespace}OME`][`${namespace}Instrument`][0][ - `${namespace}Objective` - ][0]['$'] + omeData[`${namespace}OME`][`${namespace}Instrument`][0][`${namespace}Objective`][0]['$'] for (let field in fields) { let property = fields[field] if (jsonData.hasOwnProperty(field) && objective[property]) { @@ -120,29 +116,17 @@ const checkPixelSize = (omeData, namespace, jsonData) => { let validUnits = ['um', 'µm', 'nm', 'mm'] const PhysicalSizeX = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ - '$' - ]['PhysicalSizeX'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeX'] const physicalSizeXUnit = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ - '$' - ]['PhysicalSizeXUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeXUnit'] const PhysicalSizeY = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ - '$' - ]['PhysicalSizeY'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeY'] const physicalSizeYUnit = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ - '$' - ]['PhysicalSizeYUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeYUnit'] const PhysicalSizeZ = - 
omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ - '$' - ]['PhysicalSizeZ'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeZ'] const physicalSizeZUnit = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ - '$' - ]['PhysicalSizeZUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeZUnit'] // if no corresponding json file, skip the consistency check if (Object.keys(jsonData).length === 0) return [] From 7f4b5b2811a73592d9994806927274399c4d03f6 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 12:13:26 -0600 Subject: [PATCH 077/101] rearrange logic for samples.tsv validation. Add tests. --- bids-validator/tests/tsv.spec.js | 26 +++++++++ bids-validator/validators/tsv/tsv.js | 86 +++++++++++++++------------- 2 files changed, 73 insertions(+), 39 deletions(-) diff --git a/bids-validator/tests/tsv.spec.js b/bids-validator/tests/tsv.spec.js index ef94134d2..9a477f75f 100644 --- a/bids-validator/tests/tsv.spec.js +++ b/bids-validator/tests/tsv.spec.js @@ -685,4 +685,30 @@ describe('TSV', function() { let issues = validate.TSV.validateContRec([physio_file], {}) assert(issues.length === 1 && issues[0].code === 133) }) + + // samples checks ----------------------------------------------------------- + + const samplesFile = { + name: 'samples.tsv', + relativePath: '/samples.tsv', + } + + it('should return errors for each missing mandatory header in samples.tsv', () => { + const tsv = 'wrong_col\nsome_data\n' + validate.TSV.TSV(samplesFile, tsv, [], function(issues) { + expect(issues.length).toBe(3) + const codes = issues.map(x => x.code) + expect(codes.includes(216)).toBe(true) + expect(codes.includes(217)).toBe(true) + expect(codes.includes(218)).toBe(true) + }) + }) + + it('should return an error for invalid sample_type samples.tsv', () => { + const tsv = 'sample_type\nbad\n' + validate.TSV.TSV(samplesFile, 
tsv, [], function(issues) { + const codes = issues.map(x => x.code) + expect(codes.includes(219)).toBe(true) + }) + }) }) diff --git a/bids-validator/validators/tsv/tsv.js b/bids-validator/validators/tsv/tsv.js index 7d3ad2c57..d83b84871 100644 --- a/bids-validator/validators/tsv/tsv.js +++ b/bids-validator/validators/tsv/tsv.js @@ -245,16 +245,17 @@ const TSV = (file, contents, fileList, callback) => { // samples.tsv let samples = null if (file.name === 'samples.tsv') { + const sampleIssues = [] const sampleIdColumnValues = [] const participantIdColumnValues = [] const sampleIdColumn = headers.indexOf('sample_id') const participantIdColumn = headers.indexOf('participant_id') const sampleTypeColumn = headers.indexOf('sample_type') - // if the sample_id column is missing, an error + // if the sample_id column is missing, an error // will be raised if (sampleIdColumn === -1) { - issues.push( + sampleIssues.push( new Issue({ file: file, evidence: headersEvidence(headers), @@ -263,10 +264,10 @@ const TSV = (file, contents, fileList, callback) => { }), ) } - // if the participant_id column is missing, an error + // if the participant_id column is missing, an error // will be raised - else if (participantIdColumn === -1) { - issues.push( + if (participantIdColumn === -1) { + sampleIssues.push( new Issue({ file: file, evidence: headersEvidence(headers), @@ -275,10 +276,10 @@ const TSV = (file, contents, fileList, callback) => { }), ) } - // if the sample_type column is missing, an error + // if the sample_type column is missing, an error // will be raised - else if (sampleTypeColumn === -1) { - issues.push( + if (sampleTypeColumn === -1) { + sampleIssues.push( new Issue({ file: file, evidence: headersEvidence(headers), @@ -286,8 +287,12 @@ const TSV = (file, contents, fileList, callback) => { code: 218, }), ) - } else { - // otherwise, the samples should comprise of + } + // Fold sampleIssues into main issue array, only needed it for this + // conditional. 
+ issues.push(...sampleIssues) + if (sampleIssues.length === 0) { + // otherwise, the samples should comprise of // sample- and one sample per row samples = [] for (let l = 1; l < rows.length; l++) { @@ -304,15 +309,15 @@ const TSV = (file, contents, fileList, callback) => { new Issue({ file: file, evidence: row[sampleIdColumn], - reason: 'sample_id column should be named ' + - 'as sample-.', + reason: + 'sample_id column should be named ' + 'as sample-.', line: l, code: 215, }), ) } } - // The participants should comprise of + // The participants should comprise of // sub- and one subject per row participants = [] for (let l = 1; l < rows.length; l++) { @@ -329,8 +334,9 @@ const TSV = (file, contents, fileList, callback) => { new Issue({ file: file, evidence: row[participantIdColumn], - reason: 'Participant_id column should be named ' + - 'as sub-.', + reason: + 'Participant_id column should be named ' + + 'as sub-.', line: l, code: 212, }), @@ -368,30 +374,32 @@ const TSV = (file, contents, fileList, callback) => { } } - // check if any incorrect patterns in sample_type column - const validSampleTypes = [ - 'cell line', - 'in vitro differentiated cells', - 'primary cell', - 'cell-free sample', - 'cloning host', - 'tissue', - 'whole organisms', - 'organoid', - 'technical sample', - ] - for (let c = 1; c < rows.length; c++) { - const row = rows[c] - if (!validSampleTypes.includes(row[sampleTypeColumn])) { - issues.push( - new Issue({ - file: file, - evidence: row[sampleTypeColumn], - reason: "sample_type can't be any value.", - line: c + 1, - code: 219, - }), - ) + if (sampleTypeColumn !== -1) { + // check if any incorrect patterns in sample_type column + const validSampleTypes = [ + 'cell line', + 'in vitro differentiated cells', + 'primary cell', + 'cell-free sample', + 'cloning host', + 'tissue', + 'whole organisms', + 'organoid', + 'technical sample', + ] + for (let c = 1; c < rows.length; c++) { + const row = rows[c] + if 
(!validSampleTypes.includes(row[sampleTypeColumn])) { + issues.push( + new Issue({ + file: file, + evidence: row[sampleTypeColumn], + reason: "sample_type can't be any value.", + line: c + 1, + code: 219, + }), + ) + } } } } From a0e2e31fd63acaf632a6cedcc648803810004a2b Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 7 Jan 2022 12:26:05 -0600 Subject: [PATCH 078/101] updated package-lock to reflect package.json changes. --- package-lock.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index 0e921ed78..6a6aba6bc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,10 +9,6 @@ "bids-validator", "bids-validator-web" ], - "dependencies": { - "exifreader": "^4.1.0", - "xml2js": "^0.4.23" - }, "devDependencies": { "@babel/core": "^7.7.2", "@babel/preset-env": "^7.7.1", @@ -33,6 +29,7 @@ "cross-fetch": "^3.0.6", "date-fns": "^2.7.0", "events": "^3.3.0", + "exifreader": "^4.1.0", "hed-validator": "^3.5.0", "ignore": "^4.0.2", "is-utf8": "^0.2.1", @@ -47,6 +44,7 @@ "semver": "^7.3.2", "stream-browserify": "^3.0.0", "table": "^5.2.3", + "xml2js": "^0.4.23", "yaml": "^1.10.2", "yargs": "^16.2.0" }, @@ -27795,6 +27793,7 @@ "eslint-config-prettier": "^2.9.0", "eslint-plugin-prettier": "^2.6.2", "events": "^3.3.0", + "exifreader": "^4.1.0", "hed-validator": "^3.5.0", "husky": "^1.0.0-rc.13", "ignore": "^4.0.2", @@ -27814,6 +27813,7 @@ "stream-browserify": "^3.0.0", "sync-request": "6.0.0", "table": "^5.2.3", + "xml2js": "^0.4.23", "yaml": "^1.10.2", "yargs": "^16.2.0" }, From c725d7c1e114905db3feafb53d794006729138e5 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 11 Jan 2022 16:25:49 -0600 Subject: [PATCH 079/101] have microscopy json check return empty list if no other issues found --- bids-validator/validators/bids/fullTest.js | 1 + bids-validator/validators/microscopy/checkJSONAndField.js | 7 +++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 67061b534..41c4cff88 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -148,6 +148,7 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { files, jsonContentsDict, ) + self.issues = self.issues .concat(samplesIssues) .concat(jsonAndFieldIssues) diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js index a77e6a8b4..266f3481e 100644 --- a/bids-validator/validators/microscopy/checkJSONAndField.js +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -65,6 +65,8 @@ const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { if (chunkPresent) { return checkMatrixField(file, mergedDictionary) } + + return [] } const checkMatrixField = (file, mergedDictionary) => { @@ -72,14 +74,11 @@ const checkMatrixField = (file, mergedDictionary) => { if (!mergedDictionary.hasOwnProperty('ChunkTransformationMatrix')) { issues.push( new Issue({ - file: { - path: file, - }, + file: file, code: 223, }), ) } - // } return issues } From 8f4663cfbdca61a4239cee97b080ba0df3daf480 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Wed, 12 Jan 2022 14:23:41 -0600 Subject: [PATCH 080/101] set esbuild buffer size env in bin/bids-validator when we are using esbuild-runner --- bids-validator/bin/bids-validator | 1 + 1 file changed, 1 insertion(+) diff --git a/bids-validator/bin/bids-validator b/bids-validator/bin/bids-validator index 65840cd95..e5221accf 100755 --- a/bids-validator/bin/bids-validator +++ b/bids-validator/bin/bids-validator @@ -8,6 +8,7 @@ function entry(cli) { try { // Test if there's a development tree to run require.resolve('../cli.js') + process.env.ESBUILD_MAX_BUFFER = 64 * 1024 * 1024 // For dev, use esbuild-runner require('esbuild-runner/register') const { default: cli } = require('../cli.js') From 
b4c897295aa6ac600b7c08b4aeb7e9ae0a16654c Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Wed, 12 Jan 2022 14:59:16 -0600 Subject: [PATCH 081/101] added babel runtime to web package.json, maybe it was just my setup being wonky. --- bids-validator-web/package.json | 1 + package-lock.json | 23 +++++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/bids-validator-web/package.json b/bids-validator-web/package.json index 3f77e0643..75dd8310d 100644 --- a/bids-validator-web/package.json +++ b/bids-validator-web/package.json @@ -5,6 +5,7 @@ "main": "index.js", "license": "MIT", "dependencies": { + "@babel/runtime": "^7.16.7", "bootstrap": "^4.3.0", "bowser": "^1.0.0", "next": "^11.1.2", diff --git a/package-lock.json b/package-lock.json index 6a6aba6bc..6b0d14546 100644 --- a/package-lock.json +++ b/package-lock.json @@ -75,6 +75,7 @@ "version": "1.8.10-dev.0", "license": "MIT", "dependencies": { + "@babel/runtime": "^7.16.7", "bootstrap": "^4.3.0", "bowser": "^1.0.0", "next": "^11.1.2", @@ -86,6 +87,17 @@ "sass": "^1.32.8" } }, + "bids-validator-web/node_modules/@babel/runtime": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, "bids-validator/node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -27911,6 +27923,7 @@ "bids-validator-web": { "version": "file:bids-validator-web", "requires": { + "@babel/runtime": "^7.16.7", "bootstrap": "^4.3.0", "bowser": "^1.0.0", "next": "^11.1.2", @@ -27920,6 +27933,16 @@ "react-bootstrap": "^1.0.0-beta.5", "react-dom": "^17.0.2", "sass": "^1.32.8" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz", + "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==", + "requires": { + "regenerator-runtime": "^0.13.4" + } + } } }, "big.js": { From 1918f80aeb05d9218f62b238ee02136975ac48bb Mon Sep 17 00:00:00 2001 From: Stefan Appelhoff Date: Thu, 20 Jan 2022 15:43:18 +0100 Subject: [PATCH 082/101] Add DatasetLinks to JSON schema (#1404) * first try * add functional DatasetLinks schema: any str bigger than len 0 --> uri * add tests * fix lint * use propertyNames feature, instead of not feature * migrated eeg_matchingpennies from GitHub to GIN * fix formatting E501 * Update bids-validator/tests/json.spec.js Co-authored-by: Chris Markiewicz * Update bids-validator/validators/json/schemas/dataset_description.json * fix tests Co-authored-by: Chris Markiewicz --- .../bids_validator/test_bids_validator.py | 4 +- bids-validator/tests/json.spec.js | 87 +++++++++++++++++++ .../json/schemas/dataset_description.json | 13 +++ 3 files changed, 103 insertions(+), 1 deletion(-) diff --git a/bids-validator/bids_validator/test_bids_validator.py b/bids-validator/bids_validator/test_bids_validator.py index 2f32b6256..a9cbe51bc 100644 --- a/bids-validator/bids_validator/test_bids_validator.py +++ b/bids-validator/bids_validator/test_bids_validator.py @@ -14,7 +14,9 @@ HOME = os.path.expanduser('~') TEST_DATA_DICT = { - 'eeg_matchingpennies': 'https://github.com/sappelhoff/eeg_matchingpennies' + 'eeg_matchingpennies': ( + 'https://gin.g-node.org/sappelhoff/eeg_matchingpennies' + ), } EXCLUDE_KEYWORDS = ['git', 'datalad', 'sourcedata', 'bidsignore'] diff --git a/bids-validator/tests/json.spec.js b/bids-validator/tests/json.spec.js index 4636ef5d3..4340ea8f2 100644 --- a/bids-validator/tests/json.spec.js +++ b/bids-validator/tests/json.spec.js @@ -467,6 +467,93 @@ describe('JSON', function() { relativePath: '/dataset_description.json', } + it('dataset_description.json should 
validate DatasetLinks', function() { + var jsonObj = { + Name: 'Example Name', + BIDSVersion: '1.4.0', + DatasetLinks: { + mylink: 'https://www.google.com', + deriv1: 'derivatives/derivative1', + phantoms: 'file:///data/phantoms', + ds000001: 'doi:10.18112/openneuro.ds000001.v1.0.0', + }, + } + jsonDict[dataset_description_file.relativePath] = jsonObj + validate.JSON(dataset_description_file, jsonDict, function(issues) { + assert(issues.length === 0) + }) + }) + + it('dataset_description.json should raise on bad keys in DatasetLinks', function() { + var jsonObj = { + Name: 'Example Name', + BIDSVersion: '1.4.0', + DatasetLinks: { + mylink: 'https://www.google.com', + '': 'https://www.yahoo.com', + 'mylink!': ':/path', + 'my link': ':/another/path', + }, + } + jsonDict[dataset_description_file.relativePath] = jsonObj + validate.JSON(dataset_description_file, jsonDict, function(issues) { + assert(issues.length === 6) + assert( + issues[0].evidence == + '.DatasetLinks should NOT be shorter than 1 characters', + ) + assert(issues[1].evidence == ".DatasetLinks property name '' is invalid") + assert( + issues[2].evidence == + '.DatasetLinks should match pattern "^[a-zA-Z0-9]*$"', + ) + assert( + issues[3].evidence == + ".DatasetLinks property name 'mylink!' 
is invalid", + ) + assert(issues[4].evidence == issues[2].evidence) + assert( + issues[5].evidence == + ".DatasetLinks property name 'my link' is invalid", + ) + }) + }) + + it('dataset_description.json should raise on non-object value in DatasetLinks', function() { + var jsonObj = { + Name: 'Example Name', + BIDSVersion: '1.4.0', + DatasetLinks: 'https://www.google.com', + } + jsonDict[dataset_description_file.relativePath] = jsonObj + validate.JSON(dataset_description_file, jsonDict, function(issues) { + assert(issues.length === 1) + assert(issues[0].evidence == '.DatasetLinks should be object') + }) + }) + + it('dataset_description.json should raise on invalid values in DatasetLinks', function() { + var jsonObj = { + Name: 'Example Name', + BIDSVersion: '1.4.0', + DatasetLinks: { + mylink1: 'https://www.google.com', + mylink2: 1, + '': 'https://www.yahoo.com', + }, + } + jsonDict[dataset_description_file.relativePath] = jsonObj + validate.JSON(dataset_description_file, jsonDict, function(issues) { + assert(issues.length === 3) + assert( + issues[0].evidence == + '.DatasetLinks should NOT be shorter than 1 characters', + ) + assert(issues[1].evidence == ".DatasetLinks property name '' is invalid") + assert(issues[2].evidence == ".DatasetLinks['mylink2'] should be string") + }) + }) + it('dataset_description.json should validate with enum of DatasetType', function() { var jsonObj = { Name: 'Example Name', diff --git a/bids-validator/validators/json/schemas/dataset_description.json b/bids-validator/validators/json/schemas/dataset_description.json index 4847d68f9..9b3904151 100644 --- a/bids-validator/validators/json/schemas/dataset_description.json +++ b/bids-validator/validators/json/schemas/dataset_description.json @@ -52,6 +52,19 @@ "DatasetDOI": { "type": "string" }, + "DatasetLinks": { + "type": "object", + "properties": { }, + "propertyNames": { + "type": "string", + "minLength": 1, + "pattern": "^[a-zA-Z0-9]*$" + }, + "additionalProperties": { + "type": 
"string", + "format": "uri-reference" + } + }, "GeneratedBy": { "type": "array", "minItems": 1, From cd627bba2fde89e718a30ba3fb8e8c153186a0e0 Mon Sep 17 00:00:00 2001 From: Nell Hardcastle Date: Thu, 10 Feb 2022 11:59:47 -0800 Subject: [PATCH 083/101] Fix NO_AUTHORS evidence zero length array passing array type as evidence --- bids-validator/validators/bids/checkDatasetDescription.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/validators/bids/checkDatasetDescription.js b/bids-validator/validators/bids/checkDatasetDescription.js index 4a44a232c..6b535034a 100644 --- a/bids-validator/validators/bids/checkDatasetDescription.js +++ b/bids-validator/validators/bids/checkDatasetDescription.js @@ -59,7 +59,7 @@ const checkAuthorField = authors => { // if there are no authors, // warn user that errors could occur during doi minting // and that snapshots on OpenNeuro will not be allowed - issues.push(new Issue({ code: 113, evidence: authors })) + issues.push(new Issue({ code: 113, evidence: JSON.stringify(authors) })) } return issues } From 47a3bcd6b63d77c9e2db60e1995a9b96e2acf40f Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 10 Feb 2022 14:59:09 -0600 Subject: [PATCH 084/101] add option to throw an error if specified modalities are detected --- bids-validator/src/options.js | 6 +++++ bids-validator/tests/bids.spec.js | 30 ++++++++++++++++++++++ bids-validator/utils/consoleFormat.js | 12 ++++++--- bids-validator/utils/issues/list.js | 6 ++--- bids-validator/utils/options.js | 1 + bids-validator/validators/bids/fullTest.js | 14 ++++++++++ 6 files changed, 62 insertions(+), 7 deletions(-) diff --git a/bids-validator/src/options.js b/bids-validator/src/options.js index 63a392e1d..26f28f204 100644 --- a/bids-validator/src/options.js +++ b/bids-validator/src/options.js @@ -21,6 +21,12 @@ export function parseOptions(argumentOverride) { 'ignoreSubjectConsistency', 'Skip checking that any given file for one subject is present for all 
other subjects.', ) + .option('blacklistModalities', { + default: [], + describe: 'Array of modalities to error on if detected.', + array: true, + choices: [ 'MRI', 'PET', 'MEG', 'EEG', 'iEEG', 'Microscopy' ], + }) .boolean('verbose') .describe('verbose', 'Log more extensive information about issues') .boolean('json') diff --git a/bids-validator/tests/bids.spec.js b/bids-validator/tests/bids.spec.js index 66910a561..c33c8faff 100644 --- a/bids-validator/tests/bids.spec.js +++ b/bids-validator/tests/bids.spec.js @@ -218,6 +218,36 @@ describe('BIDS example datasets ', function() { }) }) +it('blacklists modalities specified', function(isdone) { + + const _options = { ...options, blacklistModalities: ['MRI'] } + validate.BIDS(createExampleFileList('ds001'), _options, function( + issues, + summary, + ) { + var errors = issues.errors + var warnings = issues.warnings + assert(summary.sessions.length === 0) + assert(summary.subjects.length === 16) + assert.deepEqual(summary.tasks, ['balloon analog risk task']) + assert(summary.modalities.includes('MRI')) + assert(summary.totalFiles === 134) + assert.deepEqual(errors.length, 2) + assert(warnings.length === 2) + assert( + warnings.findIndex(warning => warning.code === 13) > -1, + 'warnings do not contain a code 13', + ) + assert( + errors.findIndex(error => error.code === 139) > -1, + 'errors do contain a code 139', + ) + + isdone() + }) + }) + + it('checks for data dictionaries without corresponding data files', function(isdone) { validate.BIDS(createDatasetFileList('unused_data_dict'), options, function( issues, diff --git a/bids-validator/utils/consoleFormat.js b/bids-validator/utils/consoleFormat.js index dc0bca507..f2b6d83d2 100644 --- a/bids-validator/utils/consoleFormat.js +++ b/bids-validator/utils/consoleFormat.js @@ -66,19 +66,23 @@ function logIssues(issues, color, options) { if (!file || !file.file) { continue } - output.push('\t\t.' 
+ file.file.relativePath) + let indent = '\t\t' + if (file.file.relativePath) { + output.push(`${indent}.` + file.file.relativePath) + indent = '\t\t\t' + } if (options.verbose) { - output.push('\t\t\t' + file.reason) + output.push(indent + file.reason) } if (file.line) { - var msg = '\t\t\t@ line: ' + file.line + var msg = `${indent}@ line: ` + file.line if (file.character) { msg += ' character: ' + file.character } output.push(msg) } if (file.evidence) { - output.push('\t\t\tEvidence: ' + file.evidence) + output.push(`${indent}Evidence: ` + file.evidence) } } if (issue.additionalFileCount > 0) { diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index 1c17b2dc3..b33a6b6f9 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -710,9 +710,9 @@ export default { "It is recommended to define 'PulseSequenceDetails' for this file. 'PulseSequenceDetails' is the information beyond pulse sequence type that identifies the specific pulse sequence used (for example, 'Standard Siemens Sequence distributed with the VB17 software', 'Siemens WIP ### version #.##', or 'Sequence written by X using a version compiled on MM/DD/YYYY').", }, 139: { - key: '139_EMPTY', - severity: 'warning', - reason: '', + key: 'BLACKLISTED_MODALITY', + severity: 'error', + reason: 'Found a modality that has been blacklisted through validator configuration.', }, 140: { key: '140_EMPTY', diff --git a/bids-validator/utils/options.js b/bids-validator/utils/options.js index 7d12c052f..46dc28d1c 100644 --- a/bids-validator/utils/options.js +++ b/bids-validator/utils/options.js @@ -17,6 +17,7 @@ export default { ignoreNiftiHeaders: Boolean(options.ignoreNiftiHeaders), ignoreSymlinks: Boolean(options.ignoreSymlinks), ignoreSubjectConsistency: Boolean(options.ignoreSubjectConsistency), + blacklistModalities: options.blacklistModalities, verbose: Boolean(options.verbose), gitTreeMode: Boolean(options.gitTreeMode), remoteFiles: 
Boolean(options.remoteFiles), diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 41c4cff88..fe1c760d6 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -46,6 +46,20 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const summary = utils.collectSummary(fileList, self.options, schema) + if (self.options.blacklistModalities) { + self.options.blacklistModalities.map(mod => { + if (summary.modalities.includes(mod)) { + self.issues.push( + new Issue({ + file: mod, + evidence: `found ${mod} files`, + code: 139 + }) + ) + } + }) + } + // remove size redundancies for (const key in fileList) { if (fileList.hasOwnProperty(key)) { From e0f2f92cfa88a9a1d37384aa91391bd36626b6a3 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 10 Feb 2022 15:02:24 -0600 Subject: [PATCH 085/101] eslint --fix --- bids-validator/src/options.js | 2 +- bids-validator/tests/bids.spec.js | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/bids-validator/src/options.js b/bids-validator/src/options.js index 26f28f204..5c7e15bc4 100644 --- a/bids-validator/src/options.js +++ b/bids-validator/src/options.js @@ -25,7 +25,7 @@ export function parseOptions(argumentOverride) { default: [], describe: 'Array of modalities to error on if detected.', array: true, - choices: [ 'MRI', 'PET', 'MEG', 'EEG', 'iEEG', 'Microscopy' ], + choices: ['MRI', 'PET', 'MEG', 'EEG', 'iEEG', 'Microscopy'], }) .boolean('verbose') .describe('verbose', 'Log more extensive information about issues') diff --git a/bids-validator/tests/bids.spec.js b/bids-validator/tests/bids.spec.js index c33c8faff..feb523dda 100644 --- a/bids-validator/tests/bids.spec.js +++ b/bids-validator/tests/bids.spec.js @@ -218,8 +218,7 @@ describe('BIDS example datasets ', function() { }) }) -it('blacklists modalities specified', function(isdone) { - + it('blacklists modalities specified', 
function(isdone) { const _options = { ...options, blacklistModalities: ['MRI'] } validate.BIDS(createExampleFileList('ds001'), _options, function( issues, @@ -247,7 +246,6 @@ it('blacklists modalities specified', function(isdone) { }) }) - it('checks for data dictionaries without corresponding data files', function(isdone) { validate.BIDS(createDatasetFileList('unused_data_dict'), options, function( issues, From 7ad713a9e91996c138374b69a04df64a0a3bb93e Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Thu, 10 Feb 2022 15:10:23 -0600 Subject: [PATCH 086/101] update consoleFormat spec to not check for undefined file in output. --- bids-validator/tests/consoleFormat.spec.js | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/bids-validator/tests/consoleFormat.spec.js b/bids-validator/tests/consoleFormat.spec.js index 537afe9a9..e443eb1c9 100644 --- a/bids-validator/tests/consoleFormat.spec.js +++ b/bids-validator/tests/consoleFormat.spec.js @@ -65,10 +65,9 @@ describe('console format', () => { assert(Array.isArray(output)) assert.deepEqual(output, [ '\t\u001b[31m1: [ERR] testing consoleFormat (code: undefined - TEST_ERROR)\u001b[39m', - '\t\t.undefined', - '\t\t\ttesting consoleFormat', - '\t\t\t@ line: -1 character: -1', - '\t\t\tEvidence: none', + '\t\ttesting consoleFormat', + '\t\t@ line: -1 character: -1', + '\t\tEvidence: none', '', ]) }) @@ -78,10 +77,9 @@ describe('console format', () => { }) assert.deepEqual(output, [ '\t\u001b[33m1: [WARN] testing consoleFormat (code: undefined - TEST_WARNING)\u001b[39m', - '\t\t.undefined', - '\t\t\ttesting consoleFormat', - '\t\t\t@ line: -1 character: -1', - '\t\t\tEvidence: none', + '\t\ttesting consoleFormat', + '\t\t@ line: -1 character: -1', + '\t\tEvidence: none', '', ]) }) From a8e582e4f794f9db7002f9060c67b9579736585b Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 11 Feb 2022 16:24:41 -0600 Subject: [PATCH 087/101] treat ignored files in node readDir like we do in browser. 
Call modality blacklist check before full summary, so we can error out on blacklisted modalities in bidsignore, and still have an accurate summary of non bidsignored files --- bids-validator/utils/files/readDir.js | 66 ++++++++++++---------- bids-validator/validators/bids/fullTest.js | 11 +++- 2 files changed, 43 insertions(+), 34 deletions(-) diff --git a/bids-validator/utils/files/readDir.js b/bids-validator/utils/files/readDir.js index 6321ef07b..745c4b7c7 100644 --- a/bids-validator/utils/files/readDir.js +++ b/bids-validator/utils/files/readDir.js @@ -226,9 +226,12 @@ const processFiles = (dir, ig, ...fileLists) => file.relativePath = path.normalize(`${path.sep}${file.path}`) return file }) - .filter(file => { + .map(file => { const ignore = ig.ignores(file.relativePath.slice(1)) - return !ignore + if (ignore) { + file.ignore = true + } + return file }) .map(file => { file.relativePath = harmonizeRelativePath(file.relativePath) @@ -276,40 +279,41 @@ async function getFilesFromFs(dir, rootPath, ig, options) { path.relative(rootPath, fullPath), ) const ignore = ig.ignores(path.relative('/', relativePath)) - if (!ignore) { - const fileObj = { - name: file.name, - path: fullPath, - relativePath, - } - // Three cases to consider: directories, files, symlinks - if (file.isDirectory()) { - await recursiveMerge(fullPath) - } else if (file.isSymbolicLink()) { - // Allow skipping symbolic links which lead to recursion - // Disabling this is a big performance advantage on high latency - // storage but it's a good default for versatility - if (!options.ignoreSymlinks) { - try { - const targetPath = await fs.promises.realpath(fullPath) - const targetStat = await fs.promises.stat(targetPath) - // Either add or recurse from the target depending - if (targetStat.isDirectory()) { - await recursiveMerge(targetPath) - } else { - filesAccumulator.push(fileObj) - } - } catch (err) { - // Symlink points at an invalid target, skip it - return + const fileObj = { + name: file.name, + 
path: fullPath, + relativePath, + } + if (ignore) { + fileObj.ignore = true + } + // Three cases to consider: directories, files, symlinks + if (file.isDirectory()) { + await recursiveMerge(fullPath) + } else if (file.isSymbolicLink()) { + // Allow skipping symbolic links which lead to recursion + // Disabling this is a big performance advantage on high latency + // storage but it's a good default for versatility + if (!options.ignoreSymlinks) { + try { + const targetPath = await fs.promises.realpath(fullPath) + const targetStat = await fs.promises.stat(targetPath) + // Either add or recurse from the target depending + if (targetStat.isDirectory()) { + await recursiveMerge(targetPath) + } else { + filesAccumulator.push(fileObj) } - } else { - // This branch assumes all symbolic links are not directories - filesAccumulator.push(fileObj) + } catch (err) { + // Symlink points at an invalid target, skip it + return } } else { + // This branch assumes all symbolic links are not directories filesAccumulator.push(fileObj) } + } else { + filesAccumulator.push(fileObj) } } return filesAccumulator diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index fe1c760d6..33129fba6 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -19,6 +19,7 @@ import checkReadme from './checkReadme' import validateMisc from '../../utils/files/validateMisc' import collectSubjectMetadata from '../../utils/summary/collectSubjectMetadata' import collectPetFields from '../../utils/summary/collectPetFields' +import collectModalities from '../../utils/summary/collectModalities' /** * Full Test @@ -44,11 +45,13 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { const tsvs = [] - const summary = utils.collectSummary(fileList, self.options, schema) - if (self.options.blacklistModalities) { + const relativePaths = Object.keys(fileList).map( + file => 
fileList[file].relativePath, + ) + const preIgnoreModalities = collectModalities(relativePaths) self.options.blacklistModalities.map(mod => { - if (summary.modalities.includes(mod)) { + if (preIgnoreModalities.primary.includes(mod)) { self.issues.push( new Issue({ file: mod, @@ -60,6 +63,8 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { }) } + const summary = utils.collectSummary(fileList, self.options, schema) + // remove size redundancies for (const key in fileList) { if (fileList.hasOwnProperty(key)) { From cf26b68502cf2c14763c70296bd2c58b78b55eac Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Fri, 11 Feb 2022 16:27:02 -0600 Subject: [PATCH 088/101] revert changes to gitTree ignore check in readDir.js --- bids-validator/utils/files/readDir.js | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/bids-validator/utils/files/readDir.js b/bids-validator/utils/files/readDir.js index 745c4b7c7..1156620e5 100644 --- a/bids-validator/utils/files/readDir.js +++ b/bids-validator/utils/files/readDir.js @@ -226,12 +226,9 @@ const processFiles = (dir, ig, ...fileLists) => file.relativePath = path.normalize(`${path.sep}${file.path}`) return file }) - .map(file => { + .filter(file => { const ignore = ig.ignores(file.relativePath.slice(1)) - if (ignore) { - file.ignore = true - } - return file + return !ignore }) .map(file => { file.relativePath = harmonizeRelativePath(file.relativePath) From b48b1ab0337c0090237803515b37f14603471995 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 15 Feb 2022 16:30:06 -0600 Subject: [PATCH 089/101] v1.9.0 --- bids-validator-web/package.json | 2 +- bids-validator/package.json | 8 ++++---- lerna.json | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bids-validator-web/package.json b/bids-validator-web/package.json index 75dd8310d..b26622635 100644 --- a/bids-validator-web/package.json +++ b/bids-validator-web/package.json @@ -1,6 +1,6 @@ { "name": "bids-validator-web", - 
"version": "1.8.10-dev.0", + "version": "1.9.0", "description": "web client for bids-validator", "main": "index.js", "license": "MIT", diff --git a/bids-validator/package.json b/bids-validator/package.json index 054da6d47..599cad8cc 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -1,6 +1,6 @@ { "name": "bids-validator", - "version": "1.8.10-dev.0", + "version": "1.9.0", "description": "", "main": "./dist/commonjs/index.js", "exports": { @@ -43,6 +43,7 @@ "cross-fetch": "^3.0.6", "date-fns": "^2.7.0", "events": "^3.3.0", + "exifreader": "^4.1.0", "hed-validator": "^3.5.0", "ignore": "^4.0.2", "is-utf8": "^0.2.1", @@ -57,10 +58,9 @@ "semver": "^7.3.2", "stream-browserify": "^3.0.0", "table": "^5.2.3", + "xml2js": "^0.4.23", "yaml": "^1.10.2", - "yargs": "^16.2.0", - "exifreader": "^4.1.0", - "xml2js": "^0.4.23" + "yargs": "^16.2.0" }, "devDependencies": { "adm-zip": "", diff --git a/lerna.json b/lerna.json index 40ea3a9dc..02f3ab01f 100644 --- a/lerna.json +++ b/lerna.json @@ -3,5 +3,5 @@ "bids-validator", "bids-validator-web" ], - "version": "1.8.10-dev.0" + "version": "1.9.0" } From c8284999045111c4942475a35a97b923a6137fdb Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 15 Feb 2022 16:30:11 -0600 Subject: [PATCH 090/101] v1.9.1-dev.0 --- bids-validator-web/package.json | 2 +- bids-validator/package.json | 2 +- lerna.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bids-validator-web/package.json b/bids-validator-web/package.json index b26622635..19bcaf0ad 100644 --- a/bids-validator-web/package.json +++ b/bids-validator-web/package.json @@ -1,6 +1,6 @@ { "name": "bids-validator-web", - "version": "1.9.0", + "version": "1.9.1-dev.0", "description": "web client for bids-validator", "main": "index.js", "license": "MIT", diff --git a/bids-validator/package.json b/bids-validator/package.json index 599cad8cc..691854786 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -1,6 +1,6 @@ { 
"name": "bids-validator", - "version": "1.9.0", + "version": "1.9.1-dev.0", "description": "", "main": "./dist/commonjs/index.js", "exports": { diff --git a/lerna.json b/lerna.json index 02f3ab01f..d0ed85687 100644 --- a/lerna.json +++ b/lerna.json @@ -3,5 +3,5 @@ "bids-validator", "bids-validator-web" ], - "version": "1.9.0" + "version": "1.9.1-dev.0" } From 2f46cb765b7442af746b7afd42a2e4b0c1cc38dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Feb 2022 10:06:05 +0100 Subject: [PATCH 091/101] Bump nanoid from 3.1.25 to 3.3.0 (#1421) Bumps [nanoid](https://github.com/ai/nanoid) from 3.1.25 to 3.3.0. - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/3.1.25...3.3.0) --- updated-dependencies: - dependency-name: nanoid dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/package-lock.json b/package-lock.json index 6b0d14546..df8a5b5ed 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ } }, "bids-validator": { - "version": "1.8.10-dev.0", + "version": "1.9.1-dev.0", "license": "MIT", "dependencies": { "@aws-sdk/client-s3": "^3.9.0", @@ -72,7 +72,7 @@ } }, "bids-validator-web": { - "version": "1.8.10-dev.0", + "version": "1.9.1-dev.0", "license": "MIT", "dependencies": { "@babel/runtime": "^7.16.7", @@ -15943,9 +15943,9 @@ "optional": true }, "node_modules/nanoid": { - "version": "3.1.25", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.25.tgz", - "integrity": "sha512-rdwtIXaXCLFAQbnfqDRnI6jaRHp9fTcYBjtFKE8eezcZ7LuLjhUaQGNeMXf1HmRoCH32CLz6XwX0TtxEOS/A3Q==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.0.tgz", + 
"integrity": "sha512-JzxqqT5u/x+/KOFSd7JP15DOo9nOoHpx6DYatqIHUW2+flybkm+mdcraotSQR5WcnZr+qhGVh8Ted0KdfSMxlg==", "bin": { "nanoid": "bin/nanoid.cjs" }, @@ -34655,9 +34655,9 @@ "optional": true }, "nanoid": { - "version": "3.1.25", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.25.tgz", - "integrity": "sha512-rdwtIXaXCLFAQbnfqDRnI6jaRHp9fTcYBjtFKE8eezcZ7LuLjhUaQGNeMXf1HmRoCH32CLz6XwX0TtxEOS/A3Q==" + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.0.tgz", + "integrity": "sha512-JzxqqT5u/x+/KOFSd7JP15DOo9nOoHpx6DYatqIHUW2+flybkm+mdcraotSQR5WcnZr+qhGVh8Ted0KdfSMxlg==" }, "nanomatch": { "version": "1.2.13", From f804f49a350658915c67de54f40ba343c468de3e Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Wed, 16 Feb 2022 10:31:45 -0600 Subject: [PATCH 092/101] add prepublish step to root package.json --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index d51ddcc58..20a9080eb 100644 --- a/package.json +++ b/package.json @@ -10,6 +10,7 @@ "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", "postversion": "lerna version prerelease --preid dev --ignore-scripts --yes --force-publish=*", + "prepublish": "npm run prepublish -w bids-validator", "web-dev": "cd bids-validator-web && npm run dev", "web-build": "cd bids-validator-web && npm run build", "web-start": "cd bids-validator-web && npm run build && npm start", From c07b264aa903acebceb3badf941e80b4dcbbc0d6 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Wed, 16 Feb 2022 10:41:34 -0600 Subject: [PATCH 093/101] switch prepublish scripts to prepublishOnly, prepublish deprectaed in npm8. 
--- bids-validator/package.json | 2 +- package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bids-validator/package.json b/bids-validator/package.json index 691854786..f1b84ef42 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -17,7 +17,7 @@ }, "scripts": { "build": "node ./esbuild.mjs", - "prepublish": "npm run build" + "prepublishOnly": "npm run build" }, "repository": { "type": "git", diff --git a/package.json b/package.json index 20a9080eb..ab64891c3 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,7 @@ "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", "postversion": "lerna version prerelease --preid dev --ignore-scripts --yes --force-publish=*", - "prepublish": "npm run prepublish -w bids-validator", + "prepublishOnly": "npm run prepublishOnly -w bids-validator", "web-dev": "cd bids-validator-web && npm run dev", "web-build": "cd bids-validator-web && npm run build", "web-start": "cd bids-validator-web && npm run build && npm start", From 2a9d30f0bcaf650e60e0f41595120aa478f138a5 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Wed, 16 Feb 2022 11:29:39 -0600 Subject: [PATCH 094/101] v1.9.1 --- bids-validator/package.json | 2 +- lerna.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bids-validator/package.json b/bids-validator/package.json index f1b84ef42..da30cc065 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -1,6 +1,6 @@ { "name": "bids-validator", - "version": "1.9.1-dev.0", + "version": "1.9.1", "description": "", "main": "./dist/commonjs/index.js", "exports": { diff --git a/lerna.json b/lerna.json index d0ed85687..6773a92b8 100644 --- a/lerna.json +++ b/lerna.json @@ -3,5 +3,5 @@ "bids-validator", "bids-validator-web" ], - "version": "1.9.1-dev.0" + "version": "1.9.1" } From c1aa9f6246dec66e99fa34b713a21a5be11f9a48 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Wed, 16 Feb 2022 11:30:39 -0600 
Subject: [PATCH 095/101] v1.9.2 --- bids-validator/package.json | 2 +- lerna.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bids-validator/package.json b/bids-validator/package.json index da30cc065..aa271290d 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -1,6 +1,6 @@ { "name": "bids-validator", - "version": "1.9.1", + "version": "1.9.2", "description": "", "main": "./dist/commonjs/index.js", "exports": { diff --git a/lerna.json b/lerna.json index 6773a92b8..592ac085b 100644 --- a/lerna.json +++ b/lerna.json @@ -3,5 +3,5 @@ "bids-validator", "bids-validator-web" ], - "version": "1.9.1" + "version": "1.9.2" } From 87c24022167a70d07762e3bb5772e745622da1fb Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Wed, 16 Feb 2022 11:30:44 -0600 Subject: [PATCH 096/101] v1.9.3-dev.0 --- bids-validator-web/package.json | 2 +- bids-validator/package.json | 2 +- lerna.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bids-validator-web/package.json b/bids-validator-web/package.json index 19bcaf0ad..aef7e8cd3 100644 --- a/bids-validator-web/package.json +++ b/bids-validator-web/package.json @@ -1,6 +1,6 @@ { "name": "bids-validator-web", - "version": "1.9.1-dev.0", + "version": "1.9.3-dev.0", "description": "web client for bids-validator", "main": "index.js", "license": "MIT", diff --git a/bids-validator/package.json b/bids-validator/package.json index aa271290d..c87d0fd75 100644 --- a/bids-validator/package.json +++ b/bids-validator/package.json @@ -1,6 +1,6 @@ { "name": "bids-validator", - "version": "1.9.2", + "version": "1.9.3-dev.0", "description": "", "main": "./dist/commonjs/index.js", "exports": { diff --git a/lerna.json b/lerna.json index 592ac085b..48e1b4b1e 100644 --- a/lerna.json +++ b/lerna.json @@ -3,5 +3,5 @@ "bids-validator", "bids-validator-web" ], - "version": "1.9.2" + "version": "1.9.3-dev.0" } From 98917dfdd359ef6d413766894e8fb279aaf7638b Mon Sep 17 00:00:00 2001 From: Ross Blair 
Date: Fri, 7 Jan 2022 15:31:19 -0600 Subject: [PATCH 097/101] remove relative paths from npm scripts --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ab64891c3..fa6620d91 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "bids-validator-web" ], "scripts": { - "lint": "eslint bids-validator", + "lint": "eslint ./bids-validator/**/*.js", "coverage": "jest --coverage", "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", From 1e9936bda21efdd2fd9c750c43d08d03f0877d25 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 15 Mar 2022 14:50:15 -0500 Subject: [PATCH 098/101] fix errant import from rebase? --- bids-validator/utils/files/index.js | 2 -- package-lock.json | 16 ++++++++-------- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/bids-validator/utils/files/index.js b/bids-validator/utils/files/index.js index b93324da6..237b940aa 100644 --- a/bids-validator/utils/files/index.js +++ b/bids-validator/utils/files/index.js @@ -15,7 +15,6 @@ import illegalCharacterTest from './illegalCharacterTest' import sessions from './sessions' import remoteFiles from './remoteFiles' import getFileStats from './getFileStats' -import generateMergedSidecarDictWithPath from './generateMergedSidecarDictWithPath' // public API --------------------------------------------------------------------- @@ -28,7 +27,6 @@ export default { readOMEFile, readNiftiHeader, generateMergedSidecarDict, - generateMergedSidecarDictWithPath, potentialLocations, getBFileContent, collectDirectorySize, diff --git a/package-lock.json b/package-lock.json index df8a5b5ed..ec411f19a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ } }, "bids-validator": { - "version": "1.9.1-dev.0", + "version": "1.9.3-dev.0", "license": "MIT", "dependencies": { "@aws-sdk/client-s3": "^3.9.0", @@ -72,7 +72,7 @@ } }, "bids-validator-web": { - "version": "1.9.1-dev.0", + "version": 
"1.9.3-dev.0", "license": "MIT", "dependencies": { "@babel/runtime": "^7.16.7", @@ -7732,9 +7732,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001252", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001252.tgz", - "integrity": "sha512-I56jhWDGMtdILQORdusxBOH+Nl/KgQSdDmpJezYddnAkVOmnoU8zwjTV9xAjMIYxr0iPreEAVylCGcmHCjfaOw==", + "version": "1.0.30001317", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001317.tgz", + "integrity": "sha512-xIZLh8gBm4dqNX0gkzrBeyI86J2eCjWzYAs40q88smG844YIrN4tVQl/RhquHvKEKImWWFIVh1Lxe5n1G/N+GQ==", "funding": { "type": "opencollective", "url": "https://opencollective.com/browserslist" @@ -28261,9 +28261,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001252", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001252.tgz", - "integrity": "sha512-I56jhWDGMtdILQORdusxBOH+Nl/KgQSdDmpJezYddnAkVOmnoU8zwjTV9xAjMIYxr0iPreEAVylCGcmHCjfaOw==" + "version": "1.0.30001317", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001317.tgz", + "integrity": "sha512-xIZLh8gBm4dqNX0gkzrBeyI86J2eCjWzYAs40q88smG844YIrN4tVQl/RhquHvKEKImWWFIVh1Lxe5n1G/N+GQ==" }, "capture-exit": { "version": "2.0.0", From 0b43f9647fcb482dcdd32510c723dae3a08b7992 Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 15 Mar 2022 16:06:06 -0500 Subject: [PATCH 099/101] fix lint issues. 
fix call to linter for linux in package.json --- bids-validator/utils/issues/list.js | 3 +- bids-validator/validators/bids/fullTest.js | 4 +-- .../__tests__/checkJSONAndField.spec.js | 1 - .../microscopy/__tests__/validate.spec.js | 1 - .../microscopy/checkJSONAndField.js | 2 +- .../validators/microscopy/checkSamples.js | 3 -- .../validators/microscopy/ometiff.js | 35 ++++++++++++++----- package.json | 2 +- 8 files changed, 33 insertions(+), 18 deletions(-) diff --git a/bids-validator/utils/issues/list.js b/bids-validator/utils/issues/list.js index b33a6b6f9..46b8b788f 100644 --- a/bids-validator/utils/issues/list.js +++ b/bids-validator/utils/issues/list.js @@ -712,7 +712,8 @@ export default { 139: { key: 'BLACKLISTED_MODALITY', severity: 'error', - reason: 'Found a modality that has been blacklisted through validator configuration.', + reason: + 'Found a modality that has been blacklisted through validator configuration.', }, 140: { key: '140_EMPTY', diff --git a/bids-validator/validators/bids/fullTest.js b/bids-validator/validators/bids/fullTest.js index 33129fba6..16d3acdee 100644 --- a/bids-validator/validators/bids/fullTest.js +++ b/bids-validator/validators/bids/fullTest.js @@ -56,8 +56,8 @@ const fullTest = (fileList, options, annexed, dir, schema, callback) => { new Issue({ file: mod, evidence: `found ${mod} files`, - code: 139 - }) + code: 139, + }), ) } }) diff --git a/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js b/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js index 62fa586b5..ef1649e25 100644 --- a/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js +++ b/bids-validator/validators/microscopy/__tests__/checkJSONAndField.spec.js @@ -1,4 +1,3 @@ -import { assert } from 'chai' import checkJSONAndField from '../checkJSONAndField' describe('checkJSONAndField()', () => { diff --git a/bids-validator/validators/microscopy/__tests__/validate.spec.js 
b/bids-validator/validators/microscopy/__tests__/validate.spec.js index 2bf7a59f2..23dd3ff63 100644 --- a/bids-validator/validators/microscopy/__tests__/validate.spec.js +++ b/bids-validator/validators/microscopy/__tests__/validate.spec.js @@ -1,6 +1,5 @@ import path from 'path' -import readDir from '../../../utils/files/readDir' import validate from '../validate' const dataDir = path.join(__dirname, '/data') diff --git a/bids-validator/validators/microscopy/checkJSONAndField.js b/bids-validator/validators/microscopy/checkJSONAndField.js index 266f3481e..d23be516f 100644 --- a/bids-validator/validators/microscopy/checkJSONAndField.js +++ b/bids-validator/validators/microscopy/checkJSONAndField.js @@ -65,7 +65,7 @@ const ifJsonExist = (file, possibleJsonPath, jsonContentsDict) => { if (chunkPresent) { return checkMatrixField(file, mergedDictionary) } - + return [] } diff --git a/bids-validator/validators/microscopy/checkSamples.js b/bids-validator/validators/microscopy/checkSamples.js index c535a8053..788d5f7a4 100644 --- a/bids-validator/validators/microscopy/checkSamples.js +++ b/bids-validator/validators/microscopy/checkSamples.js @@ -1,10 +1,7 @@ -import isNode from '../../utils/isNode' - const Issue = require('../../utils').issues.Issue const checkSamples = fileList => { const issues = [] - const fileKeys = Object.keys(fileList) const samplesFile = Array.from(Object.values(fileList)).find( file => file.relativePath && file.relativePath == '/samples.tsv', ) diff --git a/bids-validator/validators/microscopy/ometiff.js b/bids-validator/validators/microscopy/ometiff.js index 6e6030375..eb37792e0 100644 --- a/bids-validator/validators/microscopy/ometiff.js +++ b/bids-validator/validators/microscopy/ometiff.js @@ -85,11 +85,18 @@ const checkOptionalFields = (omePath, omeData, namespace, jsonData) => { if ( omeData[`${namespace}OME`][`${namespace}Instrument`] && - omeData[`${namespace}OME`][`${namespace}Instrument`][0][`${namespace}Objective`] + 
omeData[`${namespace}OME`][`${namespace}Instrument`][0][ + `${namespace}Objective` + ] ) { let objective = - omeData[`${namespace}OME`][`${namespace}Instrument`][0][`${namespace}Objective`][0]['$'] + omeData[`${namespace}OME`][`${namespace}Instrument`][0][ + `${namespace}Objective` + ][0]['$'] for (let field in fields) { + if (!fields.hasOwnPorperty(field)) { + continue + } let property = fields[field] if (jsonData.hasOwnProperty(field) && objective[property]) { if (objective[property] != jsonData[field]) { @@ -116,17 +123,29 @@ const checkPixelSize = (omeData, namespace, jsonData) => { let validUnits = ['um', 'µm', 'nm', 'mm'] const PhysicalSizeX = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeX'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeX'] const physicalSizeXUnit = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeXUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeXUnit'] const PhysicalSizeY = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeY'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeY'] const physicalSizeYUnit = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeYUnit'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeYUnit'] const PhysicalSizeZ = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeZ'] + omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeZ'] const physicalSizeZUnit = - omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0]['$']['PhysicalSizeZUnit'] + 
omeData[`${namespace}OME`][`${namespace}Image`][0][`${namespace}Pixels`][0][ + '$' + ]['PhysicalSizeZUnit'] // if no corresponding json file, skip the consistency check if (Object.keys(jsonData).length === 0) return [] diff --git a/package.json b/package.json index fa6620d91..0abccc077 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "bids-validator-web" ], "scripts": { - "lint": "eslint ./bids-validator/**/*.js", + "lint": "eslint './bids-validator/**/*.js'", "coverage": "jest --coverage", "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest", From b83c04d55f419b1e6ab5f3c28d61f0c5ddf886ed Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 15 Mar 2022 16:15:33 -0500 Subject: [PATCH 100/101] porperty -> property --- bids-validator/validators/microscopy/ometiff.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bids-validator/validators/microscopy/ometiff.js b/bids-validator/validators/microscopy/ometiff.js index eb37792e0..427a81ec8 100644 --- a/bids-validator/validators/microscopy/ometiff.js +++ b/bids-validator/validators/microscopy/ometiff.js @@ -94,7 +94,7 @@ const checkOptionalFields = (omePath, omeData, namespace, jsonData) => { `${namespace}Objective` ][0]['$'] for (let field in fields) { - if (!fields.hasOwnPorperty(field)) { + if (!fields.hasOwnProperty(field)) { continue } let property = fields[field] From e5e9250d685ab5f8fd58d96278b48c566a880a5f Mon Sep 17 00:00:00 2001 From: Ross Blair Date: Tue, 15 Mar 2022 16:29:35 -0500 Subject: [PATCH 101/101] does windows actually care about single vs double quotes? 
--- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 0abccc077..892d0a9d3 100644 --- a/package.json +++ b/package.json @@ -5,7 +5,7 @@ "bids-validator-web" ], "scripts": { - "lint": "eslint './bids-validator/**/*.js'", + "lint": "eslint \"./bids-validator/**/*.js\"", "coverage": "jest --coverage", "codecov": "codecov", "test": "node bids-validator/bin/test-submodule-exists && jest",