Skip to content

Commit

Permalink
Remove biome config
Browse files Browse the repository at this point in the history
  • Loading branch information
cmdcolin committed Dec 15, 2024
1 parent 3bf2afb commit 57971d6
Show file tree
Hide file tree
Showing 8 changed files with 97 additions and 124 deletions.
2 changes: 1 addition & 1 deletion src/cramFile/codecs/_base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,5 +41,5 @@ export default abstract class CramCodec<
coreDataBlock: CramFileBlock,
blocksByContentId: Record<number, CramFileBlock>,
cursors: Cursors,
): DataTypeMapping[TResult]
): DataTypeMapping[TResult] | undefined
}
16 changes: 4 additions & 12 deletions src/cramFile/codecs/byteArrayLength.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,22 +32,14 @@ export default class ByteArrayStopCodec extends CramCodec<
cursors: Cursors,
) {
const lengthCodec = this._getLengthCodec()
const arrayLength = lengthCodec.decode(
slice,
coreDataBlock,
blocksByContentId,
cursors,
)
const arrayLength =
lengthCodec.decode(slice, coreDataBlock, blocksByContentId, cursors) || 0

const dataCodec = this._getDataCodec()
const data = new Uint8Array(arrayLength)
for (let i = 0; i < arrayLength; i += 1) {
data[i] = dataCodec.decode(
slice,
coreDataBlock,
blocksByContentId,
cursors,
)
data[i] =
dataCodec.decode(slice, coreDataBlock, blocksByContentId, cursors) || 0
}

return data
Expand Down
8 changes: 2 additions & 6 deletions src/cramFile/codecs/external.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,13 +39,9 @@ export default class ExternalCodec extends CramCodec<
) {
const { blockContentId } = this.parameters
const contentBlock = blocksByContentId[blockContentId]
if (!contentBlock) {
throw new CramMalformedError(
`no block found with content ID ${blockContentId}}`,
)
}

const cursor = cursors.externalBlocks.getCursor(blockContentId)
return this._decodeData(contentBlock, cursor)
return contentBlock ? this._decodeData(contentBlock, cursor) : undefined
}

_decodeInt(contentBlock: CramFileBlock, cursor: Cursor) {
Expand Down
26 changes: 5 additions & 21 deletions src/cramFile/container/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ export default class CramContainer {

// if there are no records in the container, there will be no compression
// header
if (!containerHeader?.numRecords) {
if (!containerHeader.numRecords) {
return null
}
const { majorVersion } = await this.file.getDefinition()
Expand Down Expand Up @@ -51,9 +51,6 @@ export default class CramContainer {

async getFirstBlock() {
const containerHeader = await this.getHeader()
if (!containerHeader) {
return undefined
}
return this.file.readBlock(containerHeader._endPosition)
}

Expand All @@ -78,12 +75,6 @@ export default class CramContainer {
const { majorVersion } = await this.file.getDefinition()
const sectionParsers = getSectionParsers(majorVersion)
const { cramContainerHeader1, cramContainerHeader2 } = sectionParsers
const { size: fileSize } = await this.file.stat()

if (position >= fileSize) {
console.warn(`pos:${position}>=fileSize:${fileSize} in cram container`)
return undefined
}

// parse the container header. do it in 2 pieces because you cannot tell
// how much to buffer until you read numLandmarks
Expand All @@ -93,13 +84,6 @@ export default class CramContainer {
)
const header1 = parseItem(bytes1, cramContainerHeader1.parser)
const numLandmarksSize = itf8Size(header1.numLandmarks)
if (position + header1.length >= fileSize) {
// header indicates container goes beyond fileSize
console.warn(
`container at ${position} is beyond fileSize:${fileSize}, skipping`,
)
return undefined
}

const bytes2 = await this.file.read(
cramContainerHeader2.maxLength(header1.numLandmarks),
Expand All @@ -116,12 +100,12 @@ export default class CramContainer {
)
}

const completeHeader = Object.assign(header1, header2, {
return {
...header1,
...header2,
_size: header1._size + header2._size - numLandmarksSize,
_endPosition: header1._size + header2._size - numLandmarksSize + position,
})

return completeHeader
}
}
}

Expand Down
69 changes: 32 additions & 37 deletions src/cramFile/file.ts
Original file line number Diff line number Diff line change
Expand Up @@ -102,11 +102,6 @@ export default class CramFile {
}
}

// can just stat this object like a filehandle
stat() {
return this.file.stat()
}

// can just stat this object like a filehandle
read(length: number, position: number) {
return this.file.read(length, position)
Expand Down Expand Up @@ -158,25 +153,20 @@ export default class CramFile {
const { majorVersion } = await this.getDefinition()
const sectionParsers = getSectionParsers(majorVersion)
let position = sectionParsers.cramFileDefinition.maxLength
const { size: fileSize } = await this.file.stat()
const { cramContainerHeader1 } = sectionParsers

Check warning on line 156 in src/cramFile/file.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

'cramContainerHeader1' is assigned a value but never used

// skip with a series of reads to the proper container
let currentContainer: CramContainer | undefined
for (let i = 0; i <= containerNumber; i++) {
// if we are about to go off the end of the file
// and have not found that container, it does not exist
if (position + cramContainerHeader1.maxLength + 8 >= fileSize) {
return undefined
}
// if (position + cramContainerHeader1.maxLength + 8 >= fileSize) {
// return undefined
// }

currentContainer = this.getContainerAtPosition(position)
const currentHeader = await currentContainer.getHeader()
if (!currentHeader) {
throw new CramMalformedError(
`container ${containerNumber} not found in file`,
)
}

// if this is the first container, read all the blocks in the container
// to determine its length, because we cannot trust the container
// header's given length due to a bug somewhere in htslib
Expand Down Expand Up @@ -219,39 +209,44 @@ export default class CramFile {

/**
* @returns {Promise[number]} the number of containers in the file
*
* note: this is currently used only in unit tests, and after removing the file
* length check, relies on a try/catch around the read to surface an error and break the loop
*/
async containerCount(): Promise<number | undefined> {
const { majorVersion } = await this.getDefinition()
const sectionParsers = getSectionParsers(majorVersion)
const { size: fileSize } = await this.file.stat()
const { cramContainerHeader1 } = sectionParsers

let containerCount = 0
let position = sectionParsers.cramFileDefinition.maxLength
while (position + cramContainerHeader1.maxLength + 8 < fileSize) {
const currentHeader =
await this.getContainerAtPosition(position).getHeader()
if (!currentHeader) {
break
}
// if this is the first container, read all the blocks in the container,
// because we cannot trust the container header's given length due to a
// bug somewhere in htslib
if (containerCount === 0) {
position = currentHeader._endPosition
for (let j = 0; j < currentHeader.numBlocks; j++) {
const block = await this.readBlock(position)
if (block === undefined) {
return undefined
try {
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
while (true) {
const currentHeader =
await this.getContainerAtPosition(position).getHeader()

// if this is the first container, read all the blocks in the container,
// because we cannot trust the container header's given length due to a
// bug somewhere in htslib
if (containerCount === 0) {
position = currentHeader._endPosition
for (let j = 0; j < currentHeader.numBlocks; j++) {
const block = await this.readBlock(position)
if (block === undefined) {
break
}
position = block._endPosition
}
position = block._endPosition
} else {
// otherwise, just traverse to the next container using the container's
// length
position += currentHeader._size + currentHeader.length
}
} else {
// otherwise, just traverse to the next container using the container's
// length
position += currentHeader._size + currentHeader.length
containerCount += 1
}
containerCount += 1
} catch (e) {
containerCount--
/* do nothing */
}

return containerCount
Expand Down
2 changes: 1 addition & 1 deletion src/cramFile/record.ts
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ export default class CramRecord {

this.readGroupId = readGroupId
this.readName = readName
this.sequenceId = sequenceId
this.sequenceId = sequenceId!
this.uniqueId = uniqueId
this.templateSize = templateSize
this.alignmentStart = alignmentStart
Expand Down
49 changes: 26 additions & 23 deletions src/cramFile/slice/decodeRecord.ts
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ function decodeReadFeatures(

export type DataSeriesDecoder = <T extends DataSeriesEncodingKey>(
dataSeriesName: T,
) => DataTypeMapping[DataSeriesTypes[T]]
) => DataTypeMapping[DataSeriesTypes[T]] | undefined

export default function decodeRecord(
slice: CramSlice,
Expand All @@ -224,12 +224,11 @@ export default function decodeRecord(
majorVersion: number,
recordNumber: number,
) {
let flags = decodeDataSeries('BF')
let flags = decodeDataSeries('BF')!

// note: the C data type of compressionFlags is byte in cram v1
// and int32 in cram v2+, but that does not matter for us here
// in javascript land.
const cramFlags = decodeDataSeries('CF')
// note: the C data type of compressionFlags is byte in cram v1 and int32 in
// cram v2+, but that does not matter for us here in javascript land.
const cramFlags = decodeDataSeries('CF')!

if (!isMappedSliceHeader(sliceHeader.parsedContent)) {
throw new Error('slice header not mapped')
Expand All @@ -240,18 +239,18 @@ export default function decodeRecord(
? decodeDataSeries('RI')
: sliceHeader.parsedContent.refSeqId

const readLength = decodeDataSeries('RL')
const readLength = decodeDataSeries('RL')!
// if APDelta, will calculate the true start in a second pass
let alignmentStart = decodeDataSeries('AP')
let alignmentStart = decodeDataSeries('AP')!
if (compressionScheme.APdelta) {
alignmentStart = alignmentStart + cursors.lastAlignmentStart
}
cursors.lastAlignmentStart = alignmentStart
const readGroupId = decodeDataSeries('RG')
const readGroupId = decodeDataSeries('RG')!

let readName: string | undefined
if (compressionScheme.readNamesIncluded) {
readName = readNullTerminatedString(decodeDataSeries('RN'))
readName = readNullTerminatedString(decodeDataSeries('RN')!)
}

let mateToUse:
Expand All @@ -268,14 +267,14 @@ export default function decodeRecord(
if (CramFlagsDecoder.isDetached(cramFlags)) {
// note: the MF is a byte in 1.0, int32 in 2+, but once again this doesn't
// matter for javascript
const mateFlags = decodeDataSeries('MF')
const mateFlags = decodeDataSeries('MF')!
let mateReadName: string | undefined
if (!compressionScheme.readNamesIncluded) {
mateReadName = readNullTerminatedString(decodeDataSeries('RN'))
mateReadName = readNullTerminatedString(decodeDataSeries('RN')!)
readName = mateReadName
}
const mateSequenceId = decodeDataSeries('NS')
const mateAlignmentStart = decodeDataSeries('NP')
const mateSequenceId = decodeDataSeries('NS')!
const mateAlignmentStart = decodeDataSeries('NP')!
if (mateFlags || mateSequenceId > -1) {
mateToUse = {
mateFlags,
Expand All @@ -285,7 +284,7 @@ export default function decodeRecord(
}
}

templateSize = decodeDataSeries('TS')
templateSize = decodeDataSeries('TS')!

// set mate unmapped if needed
if (MateFlagsDecoder.isUnmapped(mateFlags)) {
Expand All @@ -298,12 +297,12 @@ export default function decodeRecord(

// detachedCount++
} else if (CramFlagsDecoder.isWithMateDownstream(cramFlags)) {
mateRecordNumber = decodeDataSeries('NF') + recordNumber + 1
mateRecordNumber = decodeDataSeries('NF')! + recordNumber + 1
}

// TODO: the aux tag parsing will have to be refactored if we want to support
// cram v1
const TLindex = decodeDataSeries('TL')
const TLindex = decodeDataSeries('TL')!
if (TLindex < 0) {
/* TODO: check nTL: TLindex >= compressionHeader.tagEncoding.size */
throw new CramMalformedError('invalid TL index')
Expand All @@ -322,7 +321,11 @@ export default function decodeRecord(
.getCodecForTag(tagId)
.decode(slice, coreDataBlock, blocksByContentId, cursors)
tags[tagName] =
typeof tagData === 'number' ? tagData : parseTagData(tagType, tagData)
tagData === undefined
? undefined
: typeof tagData === 'number'
? tagData
: parseTagData(tagType, tagData)
}

let readFeatures: ReadFeature[] | undefined
Expand All @@ -332,7 +335,7 @@ export default function decodeRecord(
let readBases = undefined
if (!BamFlagsDecoder.isSegmentUnmapped(flags)) {
// reading read features
const readFeatureCount = decodeDataSeries('FN')
const readFeatureCount = decodeDataSeries('FN')!
if (readFeatureCount) {
readFeatures = decodeReadFeatures(
alignmentStart,
Expand Down Expand Up @@ -367,11 +370,11 @@ export default function decodeRecord(
}

// mapping quality
mappingQuality = decodeDataSeries('MQ')
mappingQuality = decodeDataSeries('MQ')!
if (CramFlagsDecoder.isPreservingQualityScores(cramFlags)) {
qualityScores = new Array(readLength)
for (let i = 0; i < qualityScores.length; i++) {
qualityScores[i] = decodeDataSeries('QS')
qualityScores[i] = decodeDataSeries('QS')!
}
}
} else if (CramFlagsDecoder.isDecodeSequenceAsStar(cramFlags)) {
Expand All @@ -380,14 +383,14 @@ export default function decodeRecord(
} else {
const bases = new Array(readLength) as number[]
for (let i = 0; i < bases.length; i++) {
bases[i] = decodeDataSeries('BA')
bases[i] = decodeDataSeries('BA')!
}
readBases = String.fromCharCode(...bases)

if (CramFlagsDecoder.isPreservingQualityScores(cramFlags)) {
qualityScores = new Array(readLength)
for (let i = 0; i < bases.length; i++) {
qualityScores[i] = decodeDataSeries('QS')
qualityScores[i] = decodeDataSeries('QS')!
}
}
}
Expand Down
Loading

0 comments on commit 57971d6

Please sign in to comment.