feat(image-io): add readImage
thewtex committed Oct 9, 2023
1 parent cfbfd13 commit 1440b18
Showing 125 changed files with 793 additions and 327 deletions.
Two binary files changed (contents not shown).
@@ -52,12 +52,12 @@ async function vectorMagnitudeNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    magnitudeImage: outputs[0].data as Image,
+    magnitudeImage: outputs[0]?.data as Image,
   }
   return result
 }
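The same two edits repeat across the generated bindings below: the guard now throws only when a non-zero return value is accompanied by a non-empty stderr, and each output is read with optional chaining. A minimal TypeScript sketch of the new behavior, assuming only the result shape visible in these hunks (returnValue, stderr, outputs):

// Sketch of the guard pattern introduced in this commit; the PipelineRun shape is
// an assumption based on the destructuring shown in the hunks, not an itk-wasm API.
interface PipelineRun {
  returnValue: number
  stderr: string
  outputs: Array<{ data: unknown }>
}

function firstOutput<T> (run: PipelineRun): T | undefined {
  // Previously any non-zero return value threw, even with an empty stderr.
  // Now only a non-zero return value with diagnostic text is treated as fatal.
  if (run.returnValue !== 0 && run.stderr !== '') {
    throw new Error(run.stderr)
  }
  // Optional chaining tolerates a missing output entry instead of raising a TypeError.
  return run.outputs[0]?.data as T | undefined
}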
4 changes: 2 additions & 2 deletions packages/compare-images/typescript/src/vector-magnitude.ts
@@ -55,13 +55,13 @@ async function vectorMagnitude(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    magnitudeImage: outputs[0].data as Image,
+    magnitudeImage: outputs[0]?.data as Image,
   }
   return result
 }
@@ -65,12 +65,12 @@ async function compressStringifyNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    output: (outputs[0].data as BinaryStream).data,
+    output: (outputs[0]?.data as BinaryStream).data,
   }
   return result
 }
@@ -68,13 +68,13 @@ async function compressStringify(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    output: (outputs[0].data as BinaryStream).data,
+    output: (outputs[0]?.data as BinaryStream).data,
   }
   return result
 }
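In the browser variants, the result also carries the worker that executed the pipeline (webWorker: usedWebWorker as Worker). A hypothetical sketch of reusing that worker across calls, assuming the (webWorker, input) calling convention shown in these hunks and the @itk-wasm/compress-stringify package name; neither is confirmed by this commit:

// Hypothetical worker reuse; package name, argument order, and the
// null-to-create-a-worker convention are assumptions.
import { compressStringify } from '@itk-wasm/compress-stringify'

async function compressBoth (a: Uint8Array, b: Uint8Array) {
  // Passing null lets the binding spin up its own web worker.
  const first = await compressStringify(null, a)
  // Handing the returned worker back avoids creating a second one.
  const second = await compressStringify(first.webWorker, b)
  second.webWorker.terminate()
  return [first.output, second.output]
}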
@@ -57,12 +57,12 @@ async function parseStringDecompressNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    output: (outputs[0].data as BinaryStream).data,
+    output: (outputs[0]?.data as BinaryStream).data,
   }
   return result
 }
@@ -60,13 +60,13 @@ async function parseStringDecompress(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    output: (outputs[0].data as BinaryStream).data,
+    output: (outputs[0]?.data as BinaryStream).data,
   }
   return result
 }
Two binary files changed (contents not shown).
@@ -86,13 +86,13 @@ async function applyPresentationStateToImageNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs, mountDirs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    presentationStateOutStream: outputs[0].data as JsonCompatible,
-    outputImage: outputs[1].data as Image,
+    presentationStateOutStream: outputs[0]?.data as JsonCompatible,
+    outputImage: outputs[1]?.data as Image,
   }
   return result
 }
@@ -96,14 +96,14 @@ async function applyPresentationStateToImage(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    presentationStateOutStream: outputs[0].data as JsonCompatible,
-    outputImage: outputs[1].data as Image,
+    presentationStateOutStream: outputs[0]?.data as JsonCompatible,
+    outputImage: outputs[1]?.data as Image,
   }
   return result
 }
@@ -102,12 +102,12 @@ async function readDicomEncapsulatedPdfNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs, mountDirs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    pdfBinaryOutput: (outputs[0].data as BinaryStream).data,
+    pdfBinaryOutput: (outputs[0]?.data as BinaryStream).data,
   }
   return result
 }
4 changes: 2 additions & 2 deletions packages/dicom/typescript/src/read-dicom-encapsulated-pdf.ts
@@ -108,13 +108,13 @@ async function readDicomEncapsulatedPdf(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    pdfBinaryOutput: (outputs[0].data as BinaryStream).data,
+    pdfBinaryOutput: (outputs[0]?.data as BinaryStream).data,
   }
   return result
 }
@@ -69,13 +69,13 @@ async function readImageDicomFileSeriesNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs, mountDirs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    outputImage: outputs[0].data as Image,
-    sortedFilenames: outputs[1].data as JsonCompatible,
+    outputImage: outputs[0]?.data as Image,
+    sortedFilenames: outputs[1]?.data as JsonCompatible,
   }
   return result
 }
@@ -188,12 +188,12 @@ async function structuredReportToHtmlNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs, mountDirs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    outputText: (outputs[0].data as TextStream).data,
+    outputText: (outputs[0]?.data as TextStream).data,
   }
   return result
 }
4 changes: 2 additions & 2 deletions packages/dicom/typescript/src/structured-report-to-html.ts
@@ -200,13 +200,13 @@ async function structuredReportToHtml(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    outputText: (outputs[0].data as TextStream).data,
+    outputText: (outputs[0]?.data as TextStream).data,
   }
   return result
 }
@@ -108,12 +108,12 @@ async function structuredReportToTextNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs, mountDirs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    outputText: (outputs[0].data as TextStream).data,
+    outputText: (outputs[0]?.data as TextStream).data,
   }
   return result
 }
4 changes: 2 additions & 2 deletions packages/dicom/typescript/src/structured-report-to-text.ts
@@ -114,13 +114,13 @@ async function structuredReportToText(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    outputText: (outputs[0].data as TextStream).data,
+    outputText: (outputs[0]?.data as TextStream).data,
   }
   return result
 }
6 changes: 1 addition & 5 deletions packages/image-io/CMakeLists.txt
@@ -210,10 +210,6 @@ foreach(io_module ${WebAssemblyInterface_ImageIOModules} WebAssemblyInterface)
     endif()
   endforeach()
 endforeach()
-if(EMSCRIPTEN)
-  configure_file(${CMAKE_CURRENT_SOURCE_DIR}/image-io-index.ts.in
-    ${CMAKE_CURRENT_SOURCE_DIR}/typescript/src/image-io-index.ts @ONLY)
-endif()
 
 enable_testing()
 
@@ -231,4 +227,4 @@ add_test(NAME bio-rad-write-image-test
   COMMAND bio-rad-write-image
   ${baseline_dir}/bio-rad-read-image-test.iwi.cbor
   ${output_dir}/bio-rad-write-image-test.could-write.json
-  ${output_dir}/bio-rad-write-image-test.pic)
\ No newline at end of file
+  ${output_dir}/bio-rad-write-image-test.pic)
3 changes: 0 additions & 3 deletions packages/image-io/image-io-index.ts.in

This file was deleted.

59 changes: 59 additions & 0 deletions packages/image-io/typescript/cypress/e2e/read-image.cy.ts
@@ -0,0 +1,59 @@
import { demoServer } from './common.ts'

// const cthead1SmallBase64DataURI = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAAAAABWESUoAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAAAmJLR0QA/4ePzL8AAAAHdElNRQfhBQYVKw8AZTNIAAADdklEQVQ4y2WTa2wUVRiGp6W7O3POnLmc2VrstokJlrBIUBJigjfSICVCCAo/QKM/FFNRIESJQKAws3M7M2f20t3GthRKQQq0kkoXMIq9oFwCXkg0UpMakGLgR9EmJF4TNOvZhRBb31+TvM955/vO+T6Ou69pAgSwKCCAEPc/lYUhFEUkMgH2ESmbYocEEUmKLIQqBKmEgUlERQhAPhyJiDMXPFZZDmRGoP8Q5TwC4ciMpatfXE9zmT2NVRVIQiLi76cDUVRDT/m72zLUc/Srv+gNCi8jhCrupvMAQIWf1zJx58pRj7g7h/sduunhiIIkUAJ4AUBZ0LZev3TondmeS42TuaYms6kOapJUalYQAAKxt+j4qD3yxvMZ0z47NLi/ydhWA7GMinWyAH6G1Wwe/OdUz6dz33T35dPdIxdIYrPGK0qxTnYrobVtjm+3pNvPxGu9/dTRgw8/e89et0AKF1uFItS2u7ZP7fr4K3H19VbP94me/T6fXRifM6+a/QKC6N5+PWGYZhVeNn9pzvUoTVnt3/QEz81dUTONgwjis4UzvS2Z5JbY9JlPdxmEuFZzX9va0yu5WlXmRAlWd3Tmjg980vXBprJZbYPtza0dXw40ZleeP1ZbrWKOXXpsu7Grb3gnsY/27B46+e3ElVuF3w+sm7Pki2VAUxkAo1t0a7TL8YnVPZxy6KG9fX/+2qu/+9DARoAVBiDYaHjnfc/3nHOdicA1Em6WpnOdG/I6zwCA5PCzrn6uw6VO99gBnRBKGUyIMfz3BgmrHHta8cEdu04dN6wjPwy6FinaTNT8emKNzGrgBEmJLLf7T6Tf/60wpFP2oKToB/bNr+pVTWHjghQxZuTzW51C4aIZENdj8gMv+1f3I7iYwPEqrFu+z1/zzI3vHN/ziEd9P0haV39aXxXFRaBMRrCu9Vjj5o/S5C4QBCnjws+pJ9SoqpZmRlqyeNWlPa922El22PMCl5if38q9FGV+CeAaFuK4OZY5nLRoksnsPX19nL5do2GsREoAlCtr68lo4VoXNROWdXD8j7GUNV96AMPye5MtYgU/ujF/887tHy+PXLt9o9/asUipvDfWpc1QNFWKPfla8PHI5Ysnsua2l2dH1Un7WS6rKlamxx9f/MKKhkX1syoxmLqcUMVRDTNMlZGkilPsUrOsJ6wxRSel/wuAkzbenLRf4gAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAxNy0wNS0wNlQxNzoyNjozNC0wNDowMORO/MMAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMTctMDUtMDZUMTc6MjY6MzQtMDQ6MDCVE0R/AAAAAElFTkSuQmCC'
// const byteString = window.atob(cthead1SmallBase64DataURI.split(',')[1])
// const mimeString = cthead1SmallBase64DataURI.split(',')[0].split(':')[1].split(';')[0]
// const intArray = new Uint8Array(byteString.length)
// for (let ii = 0; ii < byteString.length; ++ii) {
// intArray[ii] = byteString.charCodeAt(ii)
// }

// const cthead1SmallBlob = new window.Blob([intArray], { type: mimeString })
// const cthead1SmallFile = new window.File([cthead1SmallBlob], 'cthead1Small.png')

function verifyImage (t, image, componentType, pixelType) {
t.is(image.imageType.dimension, 2, 'dimension')
t.is(image.imageType.componentType, componentType)
t.is(image.imageType.pixelType, pixelType)
t.is(image.imageType.components, 1, 'components')
t.is(image.origin[0], 0.0, 'origin[0]')
t.is(image.origin[1], 0.0, 'origin[1]')
t.is(image.spacing[0], 1.0, 'spacing[0]')
t.is(image.spacing[1], 1.0, 'spacing[1]')
t.is(getMatrixElement(image.direction, 2, 0, 0), 1.0, 'direction (0, 0)')
t.is(getMatrixElement(image.direction, 2, 0, 1), 0.0, 'direction (0, 1)')
t.is(getMatrixElement(image.direction, 2, 1, 0), 0.0, 'direction (1, 0)')
t.is(getMatrixElement(image.direction, 2, 1, 1), 1.0, 'direction (1, 1)')
t.is(image.size[0], 32, 'size[0]')
t.is(image.size[1], 32, 'size[1]')
t.is(image.data.length, 1024, 'data.length')
t.is(image.data[512], 12, 'data[512]')
t.end()
}

describe('read-image', () => {
beforeEach(function() {
cy.visit(demoServer)

const testPathPrefix = '../test/data/input/'

const testImageFiles = [
'cthead1.png'
]
testImageFiles.forEach((fileName) => {
cy.readFile(`${testPathPrefix}${fileName}`, null).as(fileName)
})
})

it('Reads an image', function () {
cy.get('sl-tab[panel="readImage-panel"]').click()

const testFile = { contents: new Uint8Array(this['cthead1.png']), fileName: 'cthead1.png' }
cy.get('#readImageInputs input[name="serialized-image-file"]').selectFile([testFile,], { force: true })
cy.get('#readImage-serialized-image-details').should('contain', '137,80')

cy.get('#readImageInputs sl-button[name="run"]').click()

cy.get('#readImage-image-details').should('contain', 'imageType')
})
})
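The spec above drives the new readImage binding through the demo app UI. A hypothetical direct-call sketch, assuming readImage follows the same (webWorker, file) convention and { webWorker, image } result shape as the other browser bindings in this commit; the import path, parameters, and result fields are inferred, not confirmed here:

// Hypothetical browser usage of the new readImage binding (assumed signature).
import { readImage } from '@itk-wasm/image-io'

async function logImageFromInput (file: File) {
  const { webWorker, image } = await readImage(null, file)
  webWorker.terminate()
  // Roughly what the Cypress test checks in the demo's image-details panel.
  console.log(image.imageType, image.size, image.spacing)
  return image
}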
6 changes: 3 additions & 3 deletions packages/image-io/typescript/src/bio-rad-read-image-node.ts
@@ -65,13 +65,13 @@ async function bioRadReadImageNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs, mountDirs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    couldRead: outputs[0].data as JsonCompatible,
-    image: outputs[1].data as Image,
+    couldRead: outputs[0]?.data as JsonCompatible,
+    image: outputs[1]?.data as Image,
   }
   return result
 }
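Because the format-specific readers report a couldRead flag alongside the image, they can double as format probes. A sketch under the assumptions that bioRadReadImageNode takes a file path, lives in @itk-wasm/image-io, and sets couldRead to false for non-Bio-Rad input rather than throwing; those details are not established by this commit:

// Hypothetical Node-side probe built on the { couldRead, image } result shape above;
// the path parameter and the non-throwing fallback are assumptions.
import { bioRadReadImageNode } from '@itk-wasm/image-io'

async function tryReadBioRad (path: string) {
  const { couldRead, image } = await bioRadReadImageNode(path)
  if (!couldRead) {
    console.warn(`${path} does not look like a Bio-Rad .pic file`)
    return null
  }
  return image
}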
6 changes: 3 additions & 3 deletions packages/image-io/typescript/src/bio-rad-read-image.ts
@@ -71,14 +71,14 @@ async function bioRadReadImage(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    couldRead: outputs[0].data as JsonCompatible,
-    image: outputs[1].data as Image,
+    couldRead: outputs[0]?.data as JsonCompatible,
+    image: outputs[1]?.data as Image,
   }
   return result
 }
4 changes: 2 additions & 2 deletions packages/image-io/typescript/src/bio-rad-write-image-node.ts
@@ -69,12 +69,12 @@ async function bioRadWriteImageNode(
     stderr,
     outputs
   } = await runPipelineNode(pipelinePath, args, desiredOutputs, inputs, mountDirs)
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
-    couldWrite: outputs[0].data as JsonCompatible,
+    couldWrite: outputs[0]?.data as JsonCompatible,
   }
   return result
 }
6 changes: 3 additions & 3 deletions packages/image-io/typescript/src/bio-rad-write-image.ts
@@ -71,14 +71,14 @@ async function bioRadWriteImage(
     stderr,
     outputs
   } = await runPipeline(webWorker, pipelinePath, args, desiredOutputs, inputs, { pipelineBaseUrl: getPipelinesBaseUrl(), pipelineWorkerUrl: getPipelineWorkerUrl() })
-  if (returnValue !== 0) {
+  if (returnValue !== 0 && stderr !== "") {
     throw new Error(stderr)
   }
 
   const result = {
     webWorker: usedWebWorker as Worker,
-    couldWrite: outputs[0].data as JsonCompatible,
-    serializedImage: outputs[1].data as BinaryFile,
+    couldWrite: outputs[0]?.data as JsonCompatible,
+    serializedImage: outputs[1]?.data as BinaryFile,
   }
   return result
 }
(Remaining changed files are not rendered on this page.)
