diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json
index a4713c52..00b1a064 100644
--- a/.config/dotnet-tools.json
+++ b/.config/dotnet-tools.json
@@ -3,7 +3,7 @@
   "isRoot": true,
   "tools": {
     "csharpier": {
-      "version": "0.27.3",
+      "version": "0.28.1",
       "commands": [
         "dotnet-csharpier"
       ]
diff --git a/src/SharpCompress/Archives/AbstractArchive.cs b/src/SharpCompress/Archives/AbstractArchive.cs
index cfac55f2..df54d781 100644
--- a/src/SharpCompress/Archives/AbstractArchive.cs
+++ b/src/SharpCompress/Archives/AbstractArchive.cs
@@ -12,39 +12,35 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
     where TEntry : IArchiveEntry
     where TVolume : IVolume
 {
-    private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
-    private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
+    private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
+    private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
+    private bool _disposed;
+    private readonly SourceStream? _sourceStream;

     public event EventHandler<ArchiveExtractionEventArgs<TEntry>>? EntryExtractionBegin;
     public event EventHandler<ArchiveExtractionEventArgs<TEntry>>? EntryExtractionEnd;

     public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
     public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;

-    protected ReaderOptions ReaderOptions { get; }
-    private bool disposed;
-    protected SourceStream SrcStream;
-
-    internal AbstractArchive(ArchiveType type, SourceStream srcStream)
+    internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
     {
         Type = type;
-        ReaderOptions = srcStream.ReaderOptions;
-        SrcStream = srcStream;
-        lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
-        lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
+        ReaderOptions = sourceStream.ReaderOptions;
+        _sourceStream = sourceStream;
+        _lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
+        _lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
     }

-#nullable disable
     internal AbstractArchive(ArchiveType type)
     {
         Type = type;
-        lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
-        lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
+        ReaderOptions = new();
+        _lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
+        _lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
     }

-#nullable enable
-
     public ArchiveType Type { get; }

     void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
@@ -65,12 +61,12 @@ private static Stream CheckStreams(Stream stream)
     /// <summary>
     /// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
     /// </summary>
-    public virtual ICollection<TEntry> Entries => lazyEntries;
+    public virtual ICollection<TEntry> Entries => _lazyEntries;

     /// <summary>
     /// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
     /// </summary>
-    public ICollection<TVolume> Volumes => lazyVolumes;
+    public ICollection<TVolume> Volumes => _lazyVolumes;

     /// <summary>
     /// The total size of the files compressed in the archive.
@@ -84,29 +80,29 @@ private static Stream CheckStreams(Stream stream)
     public virtual long TotalUncompressSize =>
         Entries.Aggregate(0L, (total, cf) => total + cf.Size);

-    protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
+    protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);

     protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);

     IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();

-    IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
+    IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();

     public virtual void Dispose()
     {
-        if (!disposed)
+        if (!_disposed)
         {
-            lazyVolumes.ForEach(v => v.Dispose());
-            lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
-            SrcStream?.Dispose();
+            _lazyVolumes.ForEach(v => v.Dispose());
+            _lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
+            _sourceStream?.Dispose();

-            disposed = true;
+            _disposed = true;
         }
     }

     void IArchiveExtractionListener.EnsureEntriesLoaded()
     {
-        lazyEntries.EnsureFullyLoaded();
-        lazyVolumes.EnsureFullyLoaded();
+        _lazyEntries.EnsureFullyLoaded();
+        _lazyVolumes.EnsureFullyLoaded();
     }

     void IExtractionListener.FireCompressedBytesRead(
diff --git a/src/SharpCompress/Archives/AbstractWritableArchive.cs b/src/SharpCompress/Archives/AbstractWritableArchive.cs
index 84ca0b3f..614489fe 100644
--- a/src/SharpCompress/Archives/AbstractWritableArchive.cs
+++ b/src/SharpCompress/Archives/AbstractWritableArchive.cs
@@ -41,8 +41,8 @@ public void Dispose()
     internal AbstractWritableArchive(ArchiveType type)
         : base(type) { }

-    internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
-        : base(type, srcStream) { }
+    internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
+        : base(type, sourceStream) { }

     public override ICollection<TEntry> Entries
     {
@@ -120,6 +120,10 @@ private bool DoesKeyMatchExisting(string key)
     {
         foreach (var path in Entries.Select(x => x.Key))
         {
+            if (path is null)
+            {
+                continue;
+            }
             var p = path.Replace('/', '\\');
             if (p.Length > 0 && p[0] == '\\')
             {
diff --git a/src/SharpCompress/Archives/GZip/GZipArchive.cs b/src/SharpCompress/Archives/GZip/GZipArchive.cs
index 70924b04..cec2f640 100644
--- a/src/SharpCompress/Archives/GZip/GZipArchive.cs
+++ b/src/SharpCompress/Archives/GZip/GZipArchive.cs
@@ -90,7 +90,7 @@ public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = nul
     {
         stream.CheckNotNull(nameof(stream));
         return new GZipArchive(
-            new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
+            new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
         );
     }

@@ -99,16 +99,14 @@ public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = nul
     /// <summary>
     /// Constructor with a SourceStream able to handle FileInfo and Streams.
     /// </summary>
-    /// <param name="srcStream"></param>
-    /// <param name="options"></param>
-    internal GZipArchive(SourceStream srcStream)
-        : base(ArchiveType.Tar, srcStream) { }
+    /// <param name="sourceStream"></param>
+    private GZipArchive(SourceStream sourceStream)
+        : base(ArchiveType.Tar, sourceStream) { }

-    protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
+    protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
     {
-        srcStream.LoadAllParts();
-        var idx = 0;
-        return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
+        sourceStream.LoadAllParts();
+        return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
     }

     public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
@@ -184,7 +182,11 @@ IEnumerable<GZipArchiveEntry> newEntries
         foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
         {
             using var entryStream = entry.OpenEntryStream();
-            writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
+            writer.Write(
+                entry.Key.NotNull("Entry Key is null"),
+                entryStream,
+                entry.LastModifiedTime
+            );
         }
     }
diff --git a/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs b/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
index be872e80..459d042d 100644
--- a/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
+++ b/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.GZip;

 public class GZipArchiveEntry : GZipEntry, IArchiveEntry
 {
-    internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
+    internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
         : base(part) => Archive = archive;

     public virtual Stream OpenEntryStream()
diff --git a/src/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs b/src/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs
index 27dfc2bf..90f6f16f 100644
--- a/src/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs
+++ b/src/SharpCompress/Archives/GZip/GZipWritableArchiveEntry.cs
@@ -1,5 +1,3 @@
-#nullable disable
-
 using System;
 using System.Collections.Generic;
 using System.IO;
@@ -32,7 +30,7 @@ bool closeStream

     public override long Crc => 0;

-    public override string Key { get; }
+    public override string? Key { get; }

     public override long CompressedSize => 0;
diff --git a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
index 0992f152..3d1daa1a 100644
--- a/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveEntryExtensions.cs
@@ -17,15 +17,11 @@ public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWrite
         streamListener.EnsureEntriesLoaded();
         streamListener.FireEntryExtractionBegin(archiveEntry);
         streamListener.FireFilePartExtractionBegin(
-            archiveEntry.Key,
+            archiveEntry.Key ??
"Key", archiveEntry.Size, archiveEntry.CompressedSize ); var entryStream = archiveEntry.OpenEntryStream(); - if (entryStream is null) - { - return; - } using (entryStream) { using Stream s = new ListeningStream(streamListener, entryStream); diff --git a/src/SharpCompress/Archives/IArchiveExtensions.cs b/src/SharpCompress/Archives/IArchiveExtensions.cs index 56ea0d9f..382c3ddf 100644 --- a/src/SharpCompress/Archives/IArchiveExtensions.cs +++ b/src/SharpCompress/Archives/IArchiveExtensions.cs @@ -3,7 +3,6 @@ using System.IO; using System.Linq; using System.Threading; -using System.Threading.Tasks; using SharpCompress.Common; namespace SharpCompress.Archives; @@ -59,7 +58,7 @@ public static void ExtractToDirectory( } // Create each directory - var path = Path.Combine(destination, entry.Key); + var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null")); if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path)) { Directory.CreateDirectory(directory); diff --git a/src/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs b/src/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs index 3e576db3..6d9b44fd 100644 --- a/src/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs +++ b/src/SharpCompress/Archives/Rar/FileInfoRarArchiveVolume.cs @@ -13,7 +13,7 @@ namespace SharpCompress.Archives.Rar; /// internal class FileInfoRarArchiveVolume : RarVolume { - internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0) + internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index) : base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index) { FileInfo = fileInfo; diff --git a/src/SharpCompress/Archives/Rar/RarArchive.cs b/src/SharpCompress/Archives/Rar/RarArchive.cs index b6472407..a05ad417 100644 --- a/src/SharpCompress/Archives/Rar/RarArchive.cs +++ b/src/SharpCompress/Archives/Rar/RarArchive.cs @@ -21,35 +21,33 @@ public class RarArchive : AbstractArchive /// /// Constructor with a SourceStream able to handle FileInfo and Streams. 
/// - /// - /// - internal RarArchive(SourceStream srcStream) - : base(ArchiveType.Rar, srcStream) { } + /// + private RarArchive(SourceStream sourceStream) + : base(ArchiveType.Rar, sourceStream) { } protected override IEnumerable LoadEntries(IEnumerable volumes) => RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions); - protected override IEnumerable LoadVolumes(SourceStream srcStream) + protected override IEnumerable LoadVolumes(SourceStream sourceStream) { - SrcStream.LoadAllParts(); //request all streams - var streams = SrcStream.Streams.ToArray(); - var idx = 0; + sourceStream.LoadAllParts(); //request all streams + var streams = sourceStream.Streams.ToArray(); + var i = 0; if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split { - SrcStream.IsVolumes = true; + sourceStream.IsVolumes = true; streams[1].Position = 0; - SrcStream.Position = 0; + sourceStream.Position = 0; - return srcStream.Streams.Select(a => new StreamRarArchiveVolume( + return sourceStream.Streams.Select(a => new StreamRarArchiveVolume( a, ReaderOptions, - idx++ + i++ )); } - else //split mode or single file - { - return new StreamRarArchiveVolume(SrcStream, ReaderOptions, idx++).AsEnumerable(); - } + + //split mode or single file + return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable(); } protected override IReader CreateReaderForSolidExtraction() @@ -108,7 +106,7 @@ public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null) public static RarArchive Open(Stream stream, ReaderOptions? options = null) { stream.CheckNotNull(nameof(stream)); - return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions())); + return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions())); } /// diff --git a/src/SharpCompress/Archives/Rar/SeekableFilePart.cs b/src/SharpCompress/Archives/Rar/SeekableFilePart.cs index 13048d4c..97822d90 100644 --- a/src/SharpCompress/Archives/Rar/SeekableFilePart.cs +++ b/src/SharpCompress/Archives/Rar/SeekableFilePart.cs @@ -6,8 +6,8 @@ namespace SharpCompress.Archives.Rar; internal class SeekableFilePart : RarFilePart { - private readonly Stream stream; - private readonly string? password; + private readonly Stream _stream; + private readonly string? 
_password; internal SeekableFilePart( MarkHeader mh, @@ -18,27 +18,27 @@ internal SeekableFilePart( ) : base(mh, fh, index) { - this.stream = stream; - this.password = password; + _stream = stream; + _password = password; } internal override Stream GetCompressedStream() { - stream.Position = FileHeader.DataStartPosition; + _stream.Position = FileHeader.DataStartPosition; if (FileHeader.R4Salt != null) { - var cryptKey = new CryptKey3(password!); - return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey); + var cryptKey = new CryptKey3(_password!); + return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey); } if (FileHeader.Rar5CryptoInfo != null) { - var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo); - return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey); + var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo); + return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey); } - return stream; + return _stream; } internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName; diff --git a/src/SharpCompress/Archives/Rar/StreamRarArchiveVolume.cs b/src/SharpCompress/Archives/Rar/StreamRarArchiveVolume.cs index 3eb5095c..60cbab9a 100644 --- a/src/SharpCompress/Archives/Rar/StreamRarArchiveVolume.cs +++ b/src/SharpCompress/Archives/Rar/StreamRarArchiveVolume.cs @@ -9,7 +9,7 @@ namespace SharpCompress.Archives.Rar; internal class StreamRarArchiveVolume : RarVolume { - internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0) + internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index) : base(StreamingMode.Seekable, stream, options, index) { } internal override IEnumerable ReadFileParts() => GetVolumeFileParts(); diff --git a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs index 7577b43d..ee165982 100644 --- a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs +++ b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections.Generic; using System.IO; @@ -14,14 +12,14 @@ namespace SharpCompress.Archives.SevenZip; public class SevenZipArchive : AbstractArchive { - private ArchiveDatabase database; + private ArchiveDatabase? _database; /// /// Constructor expects a filepath to an existing file. /// /// /// - public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null) + public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null) { filePath.CheckNotNullOrEmpty("filePath"); return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions()); @@ -32,7 +30,7 @@ public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions /// /// /// - public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null) + public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) { fileInfo.CheckNotNull("fileInfo"); return new SevenZipArchive( @@ -51,7 +49,7 @@ public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOption /// public static SevenZipArchive Open( IEnumerable fileInfos, - ReaderOptions readerOptions = null + ReaderOptions? 
readerOptions = null ) { fileInfos.CheckNotNull(nameof(fileInfos)); @@ -72,7 +70,7 @@ public static SevenZipArchive Open( /// public static SevenZipArchive Open( IEnumerable streams, - ReaderOptions readerOptions = null + ReaderOptions? readerOptions = null ) { streams.CheckNotNull(nameof(streams)); @@ -91,27 +89,25 @@ public static SevenZipArchive Open( /// /// /// - public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null) + public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null) { stream.CheckNotNull("stream"); return new SevenZipArchive( - new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions()) + new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions()) ); } /// /// Constructor with a SourceStream able to handle FileInfo and Streams. /// - /// - /// - internal SevenZipArchive(SourceStream srcStream) - : base(ArchiveType.SevenZip, srcStream) { } + /// + private SevenZipArchive(SourceStream sourceStream) + : base(ArchiveType.SevenZip, sourceStream) { } - protected override IEnumerable LoadVolumes(SourceStream srcStream) + protected override IEnumerable LoadVolumes(SourceStream sourceStream) { - SrcStream.LoadAllParts(); //request all streams - var idx = 0; - return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported + sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams + return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported } public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath)); @@ -135,13 +131,17 @@ IEnumerable volumes { var stream = volumes.Single().Stream; LoadFactory(stream); - var entries = new SevenZipArchiveEntry[database._files.Count]; - for (var i = 0; i < database._files.Count; i++) + if (_database is null) + { + return Enumerable.Empty(); + } + var entries = new SevenZipArchiveEntry[_database._files.Count]; + for (var i = 0; i < _database._files.Count; i++) { - var file = database._files[i]; + var file = _database._files[i]; entries[i] = new SevenZipArchiveEntry( this, - new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding) + new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding) ); } foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)) @@ -159,12 +159,12 @@ IEnumerable volumes private void LoadFactory(Stream stream) { - if (database is null) + if (_database is null) { stream.Position = 0; var reader = new ArchiveReader(); reader.Open(stream); - database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password)); + _database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password)); } } @@ -180,14 +180,14 @@ public static bool IsSevenZipFile(Stream stream) } } - private static ReadOnlySpan SIGNATURE => + private static ReadOnlySpan Signature => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C }; private static bool SignatureMatch(Stream stream) { var reader = new BinaryReader(stream); ReadOnlySpan signatureBytes = reader.ReadBytes(6); - return signatureBytes.SequenceEqual(SIGNATURE); + return signatureBytes.SequenceEqual(Signature); } protected override IReader CreateReaderForSolidExtraction() => @@ -196,30 +196,24 @@ protected override IReader CreateReaderForSolidExtraction() => public override bool IsSolid => Entries.Where(x => 
!x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1; - public override long TotalSize - { - get - { - var i = Entries.Count; - return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize); - } - } + public override long TotalSize => + _database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0; private sealed class SevenZipReader : AbstractReader { - private readonly SevenZipArchive archive; - private CFolder currentFolder; - private Stream currentStream; - private CFileItem currentItem; + private readonly SevenZipArchive _archive; + private CFolder? _currentFolder; + private Stream? _currentStream; + private CFileItem? _currentItem; internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive) - : base(readerOptions, ArchiveType.SevenZip) => this.archive = archive; + : base(readerOptions, ArchiveType.SevenZip) => this._archive = archive; - public override SevenZipVolume Volume => archive.Volumes.Single(); + public override SevenZipVolume Volume => _archive.Volumes.Single(); protected override IEnumerable GetEntries(Stream stream) { - var entries = archive.Entries.ToList(); + var entries = _archive.Entries.ToList(); stream.Position = 0; foreach (var dir in entries.Where(x => x.IsDirectory)) { @@ -229,37 +223,42 @@ protected override IEnumerable GetEntries(Stream stream) var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder) ) { - currentFolder = group.Key; + _currentFolder = group.Key; if (group.Key is null) { - currentStream = Stream.Null; + _currentStream = Stream.Null; } else { - currentStream = archive.database.GetFolderStream( + _currentStream = _archive._database?.GetFolderStream( stream, - currentFolder, + _currentFolder, new PasswordProvider(Options.Password) ); } foreach (var entry in group) { - currentItem = entry.FilePart.Header; + _currentItem = entry.FilePart.Header; yield return entry; } } } protected override EntryStream GetEntryStream() => - CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size)); + CreateEntryStream( + new ReadOnlySubStream( + _currentStream.NotNull("currentStream is not null"), + _currentItem?.Size ?? 0 + ) + ); } private class PasswordProvider : IPasswordProvider { - private readonly string _password; + private readonly string? _password; - public PasswordProvider(string password) => _password = password; + public PasswordProvider(string? password) => _password = password; - public string CryptoGetTextPassword() => _password; + public string? 
CryptoGetTextPassword() => _password; } } diff --git a/src/SharpCompress/Archives/Tar/TarArchive.cs b/src/SharpCompress/Archives/Tar/TarArchive.cs index 70ac2413..3b9a45ed 100644 --- a/src/SharpCompress/Archives/Tar/TarArchive.cs +++ b/src/SharpCompress/Archives/Tar/TarArchive.cs @@ -114,7 +114,7 @@ public static bool IsTarFile(Stream stream) var tarHeader = new TarHeader(new ArchiveEncoding()); var readSucceeded = tarHeader.Read(new BinaryReader(stream)); var isEmptyArchive = - tarHeader.Name.Length == 0 + tarHeader.Name?.Length == 0 && tarHeader.Size == 0 && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType); return readSucceeded || isEmptyArchive; @@ -123,22 +123,20 @@ public static bool IsTarFile(Stream stream) return false; } - protected override IEnumerable LoadVolumes(SourceStream srcStream) + protected override IEnumerable LoadVolumes(SourceStream sourceStream) { - SrcStream.LoadAllParts(); //request all streams - var idx = 0; - return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported + sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams + return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported } /// /// Constructor with a SourceStream able to handle FileInfo and Streams. /// - /// - /// - internal TarArchive(SourceStream srcStream) - : base(ArchiveType.Tar, srcStream) { } + /// + private TarArchive(SourceStream sourceStream) + : base(ArchiveType.Tar, sourceStream) { } - internal TarArchive() + private TarArchive() : base(ArchiveType.Tar) { } protected override IEnumerable LoadEntries(IEnumerable volumes) @@ -225,7 +223,12 @@ IEnumerable newEntries foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory)) { using var entryStream = entry.OpenEntryStream(); - writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size); + writer.Write( + entry.Key.NotNull("Entry Key is null"), + entryStream, + entry.LastModifiedTime, + entry.Size + ); } } diff --git a/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs b/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs index 2da84d71..d04c4ef8 100644 --- a/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs +++ b/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs @@ -7,7 +7,7 @@ namespace SharpCompress.Archives.Tar; public class TarArchiveEntry : TarEntry, IArchiveEntry { - internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType) + internal TarArchiveEntry(TarArchive archive, TarFilePart? 
part, CompressionType compressionType) : base(part, compressionType) => Archive = archive; public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream(); diff --git a/src/SharpCompress/Archives/Tar/TarWritableArchiveEntry.cs b/src/SharpCompress/Archives/Tar/TarWritableArchiveEntry.cs index 0bbea709..32be7d9b 100644 --- a/src/SharpCompress/Archives/Tar/TarWritableArchiveEntry.cs +++ b/src/SharpCompress/Archives/Tar/TarWritableArchiveEntry.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections.Generic; using System.IO; diff --git a/src/SharpCompress/Archives/Zip/ZipArchive.cs b/src/SharpCompress/Archives/Zip/ZipArchive.cs index 87e9ac25..85130f70 100644 --- a/src/SharpCompress/Archives/Zip/ZipArchive.cs +++ b/src/SharpCompress/Archives/Zip/ZipArchive.cs @@ -16,10 +16,7 @@ namespace SharpCompress.Archives.Zip; public class ZipArchive : AbstractWritableArchive { -#nullable disable - private readonly SeekableZipHeaderFactory headerFactory; - -#nullable enable + private readonly SeekableZipHeaderFactory? headerFactory; /// /// Gets or sets the compression level applied to files added to the archive, @@ -30,13 +27,13 @@ public class ZipArchive : AbstractWritableArchive /// /// Constructor with a SourceStream able to handle FileInfo and Streams. /// - /// + /// /// - internal ZipArchive(SourceStream srcStream) - : base(ArchiveType.Zip, srcStream) => + internal ZipArchive(SourceStream sourceStream) + : base(ArchiveType.Zip, sourceStream) => headerFactory = new SeekableZipHeaderFactory( - srcStream.ReaderOptions.Password, - srcStream.ReaderOptions.ArchiveEncoding + sourceStream.ReaderOptions.Password, + sourceStream.ReaderOptions.ArchiveEncoding ); /// @@ -189,21 +186,21 @@ public static bool IsZipMulti(Stream stream, string? password = null) } } - protected override IEnumerable LoadVolumes(SourceStream srcStream) + protected override IEnumerable LoadVolumes(SourceStream stream) { - SrcStream.LoadAllParts(); //request all streams - SrcStream.Position = 0; + stream.LoadAllParts(); //request all streams + stream.Position = 0; - var streams = SrcStream.Streams.ToList(); + var streams = stream.Streams.ToList(); var idx = 0; - if (streams.Count > 1) //test part 2 - true = multipart not split + if (streams.Count() > 1) //test part 2 - true = multipart not split { streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception var isZip = IsZipFile(streams[1], ReaderOptions.Password); streams[1].Position -= 4; if (isZip) { - SrcStream.IsVolumes = true; + stream.IsVolumes = true; var tmp = streams[0]; //arcs as zip, z01 ... 
swap the zip the end streams.RemoveAt(0); @@ -215,7 +212,7 @@ protected override IEnumerable LoadVolumes(SourceStream srcStream) } //split mode or single file - return new ZipVolume(SrcStream, ReaderOptions, idx++).AsEnumerable(); + return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable(); } internal ZipArchive() @@ -224,14 +221,13 @@ internal ZipArchive() protected override IEnumerable LoadEntries(IEnumerable volumes) { var vols = volumes.ToArray(); - foreach (var h in headerFactory.ReadSeekableHeader(vols.Last().Stream)) + foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream)) { if (h != null) { switch (h.ZipHeaderType) { case ZipHeaderType.DirectoryEntry: - { var deh = (DirectoryEntryHeader)h; Stream s; @@ -254,14 +250,14 @@ protected override IEnumerable LoadEntries(IEnumerable(); - volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes); + vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes); yield break; } } @@ -282,7 +278,11 @@ IEnumerable newEntries foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory)) { using var entryStream = entry.OpenEntryStream(); - writer.Write(entry.Key, entryStream, entry.LastModifiedTime); + writer.Write( + entry.Key.NotNull("Entry Key is null"), + entryStream, + entry.LastModifiedTime + ); } } diff --git a/src/SharpCompress/Common/ArchiveEncoding.cs b/src/SharpCompress/Common/ArchiveEncoding.cs index f66044d3..3701a93b 100644 --- a/src/SharpCompress/Common/ArchiveEncoding.cs +++ b/src/SharpCompress/Common/ArchiveEncoding.cs @@ -8,12 +8,12 @@ public class ArchiveEncoding /// /// Default encoding to use when archive format doesn't specify one. /// - public Encoding Default { get; set; } + public Encoding? Default { get; set; } /// /// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898. /// - public Encoding Password { get; set; } + public Encoding? Password { get; set; } /// /// Set this encoding when you want to force it for all encoding operations. @@ -50,6 +50,8 @@ public string Decode(byte[] bytes, int start, int length) => public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8; + public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8; + public Func GetDecoder() => CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count)); } diff --git a/src/SharpCompress/Common/Entry.cs b/src/SharpCompress/Common/Entry.cs index 85219a43..6209b3de 100644 --- a/src/SharpCompress/Common/Entry.cs +++ b/src/SharpCompress/Common/Entry.cs @@ -14,7 +14,7 @@ public abstract class Entry : IEntry /// /// The string key of the file internal to the Archive. /// - public abstract string Key { get; } + public abstract string? Key { get; } /// /// The target of a symlink entry internal to the Archive. Will be null if not a symlink. @@ -71,11 +71,11 @@ public abstract class Entry : IEntry /// public abstract bool IsSplitAfter { get; } - public int VolumeIndexFirst => Parts?.FirstOrDefault()?.Index ?? 0; - public int VolumeIndexLast => Parts?.LastOrDefault()?.Index ?? 0; + public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0; + public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0; /// - public override string ToString() => Key; + public override string ToString() => Key ?? 
"Entry"; internal abstract IEnumerable Parts { get; } diff --git a/src/SharpCompress/Common/ExtractionMethods.cs b/src/SharpCompress/Common/ExtractionMethods.cs index a470637a..27d41648 100644 --- a/src/SharpCompress/Common/ExtractionMethods.cs +++ b/src/SharpCompress/Common/ExtractionMethods.cs @@ -36,10 +36,11 @@ public static void WriteEntryToDirectory( options ??= new ExtractionOptions() { Overwrite = true }; - var file = Path.GetFileName(entry.Key); + var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null"); if (options.ExtractFullPath) { - var folder = Path.GetDirectoryName(entry.Key)!; + var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null")) + .NotNull("Directory is null"); var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder)); if (!Directory.Exists(destdir)) diff --git a/src/SharpCompress/Common/FilePart.cs b/src/SharpCompress/Common/FilePart.cs index 3c286d54..23b8b400 100644 --- a/src/SharpCompress/Common/FilePart.cs +++ b/src/SharpCompress/Common/FilePart.cs @@ -8,7 +8,7 @@ public abstract class FilePart internal ArchiveEncoding ArchiveEncoding { get; } - internal abstract string FilePartName { get; } + internal abstract string? FilePartName { get; } public int Index { get; set; } internal abstract Stream GetCompressedStream(); diff --git a/src/SharpCompress/Common/GZip/GZipEntry.cs b/src/SharpCompress/Common/GZip/GZipEntry.cs index bb9a22da..9a551d07 100644 --- a/src/SharpCompress/Common/GZip/GZipEntry.cs +++ b/src/SharpCompress/Common/GZip/GZipEntry.cs @@ -6,23 +6,23 @@ namespace SharpCompress.Common.GZip; public class GZipEntry : Entry { - private readonly GZipFilePart _filePart; + private readonly GZipFilePart? _filePart; - internal GZipEntry(GZipFilePart filePart) => _filePart = filePart; + internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart; public override CompressionType CompressionType => CompressionType.GZip; - public override long Crc => _filePart.Crc ?? 0; + public override long Crc => _filePart?.Crc ?? 0; - public override string Key => _filePart.FilePartName; + public override string? Key => _filePart?.FilePartName; public override string? LinkTarget => null; public override long CompressedSize => 0; - public override long Size => _filePart.UncompressedSize ?? 0; + public override long Size => _filePart?.UncompressedSize ?? 0; - public override DateTime? LastModifiedTime => _filePart.DateModified; + public override DateTime? LastModifiedTime => _filePart?.DateModified; public override DateTime? CreatedTime => null; @@ -36,7 +36,7 @@ public class GZipEntry : Entry public override bool IsSplitAfter => false; - internal override IEnumerable Parts => _filePart.AsEnumerable(); + internal override IEnumerable Parts => _filePart.Empty(); internal static IEnumerable GetEntries(Stream stream, OptionsBase options) { diff --git a/src/SharpCompress/Common/GZip/GZipFilePart.cs b/src/SharpCompress/Common/GZip/GZipFilePart.cs index fbf4ee45..4a1c9515 100644 --- a/src/SharpCompress/Common/GZip/GZipFilePart.cs +++ b/src/SharpCompress/Common/GZip/GZipFilePart.cs @@ -34,7 +34,7 @@ internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding) internal uint? Crc { get; private set; } internal uint? UncompressedSize { get; private set; } - internal override string FilePartName => _name!; + internal override string? 
FilePartName => _name; internal override Stream GetCompressedStream() => new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default); diff --git a/src/SharpCompress/Common/GZip/GZipVolume.cs b/src/SharpCompress/Common/GZip/GZipVolume.cs index 0dd9b8d9..600ba8e3 100644 --- a/src/SharpCompress/Common/GZip/GZipVolume.cs +++ b/src/SharpCompress/Common/GZip/GZipVolume.cs @@ -5,7 +5,7 @@ namespace SharpCompress.Common.GZip; public class GZipVolume : Volume { - public GZipVolume(Stream stream, ReaderOptions options, int index = 0) + public GZipVolume(Stream stream, ReaderOptions? options, int index) : base(stream, options, index) { } public GZipVolume(FileInfo fileInfo, ReaderOptions options) diff --git a/src/SharpCompress/Common/IEntry.cs b/src/SharpCompress/Common/IEntry.cs index df1fa603..56e1db81 100644 --- a/src/SharpCompress/Common/IEntry.cs +++ b/src/SharpCompress/Common/IEntry.cs @@ -9,7 +9,7 @@ public interface IEntry long CompressedSize { get; } long Crc { get; } DateTime? CreatedTime { get; } - string Key { get; } + string? Key { get; } string? LinkTarget { get; } bool IsDirectory { get; } bool IsEncrypted { get; } diff --git a/src/SharpCompress/Common/IVolume.cs b/src/SharpCompress/Common/IVolume.cs index abbc0406..aed35fd0 100644 --- a/src/SharpCompress/Common/IVolume.cs +++ b/src/SharpCompress/Common/IVolume.cs @@ -6,5 +6,5 @@ public interface IVolume : IDisposable { int Index { get; } - string FileName { get; } + string? FileName { get; } } diff --git a/src/SharpCompress/Common/Rar/Headers/FileHeader.cs b/src/SharpCompress/Common/Rar/Headers/FileHeader.cs index 86b56055..eea8293d 100644 --- a/src/SharpCompress/Common/Rar/Headers/FileHeader.cs +++ b/src/SharpCompress/Common/Rar/Headers/FileHeader.cs @@ -121,7 +121,6 @@ private void ReadFromReaderV5(MarkingBinaryReader reader) switch (type) { case FHEXTRA_CRYPT: // file encryption - { Rar5CryptoInfo = new Rar5CryptoInfo(reader, true); @@ -132,7 +131,6 @@ private void ReadFromReaderV5(MarkingBinaryReader reader) } break; case FHEXTRA_HASH: - { const uint FHEXTRA_HASH_BLAKE2 = 0x0; // const uint HASH_BLAKE2 = 0x03; @@ -146,7 +144,6 @@ private void ReadFromReaderV5(MarkingBinaryReader reader) } break; case FHEXTRA_HTIME: // file time - { var flags = reader.ReadRarVIntUInt16(); var isWindowsTime = (flags & 1) == 0; @@ -171,7 +168,6 @@ private void ReadFromReaderV5(MarkingBinaryReader reader) // } // break; case FHEXTRA_REDIR: // file system redirection - { RedirType = reader.ReadRarVIntByte(); RedirFlags = reader.ReadRarVIntByte(); @@ -284,7 +280,6 @@ private void ReadFromReaderV4(MarkingBinaryReader reader) switch (HeaderCode) { case HeaderCodeV.RAR4_FILE_HEADER: - { if (HasFlag(FileFlagsV4.UNICODE)) { @@ -311,7 +306,6 @@ private void ReadFromReaderV4(MarkingBinaryReader reader) } break; case HeaderCodeV.RAR4_NEW_SUB_HEADER: - { var datasize = HeaderSize - newLhdSize - nameSize; if (HasFlag(FileFlagsV4.SALT)) diff --git a/src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs b/src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs index e38cb629..74d68fc7 100644 --- a/src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs +++ b/src/SharpCompress/Common/Rar/Headers/RarHeaderFactory.cs @@ -98,13 +98,11 @@ public IEnumerable ReadHeaders(Stream stream) switch (StreamingMode) { case StreamingMode.Seekable: - { reader.BaseStream.Position += ph.DataSize; } break; case StreamingMode.Streaming: - { reader.BaseStream.Skip(ph.DataSize); } @@ -146,14 +144,12 @@ public IEnumerable ReadHeaders(Stream stream) switch 
(StreamingMode) { case StreamingMode.Seekable: - { fh.DataStartPosition = reader.BaseStream.Position; reader.BaseStream.Position += fh.CompressedSize; } break; case StreamingMode.Streaming: - { var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize); if (fh.R4Salt is null && fh.Rar5CryptoInfo is null) @@ -204,14 +200,12 @@ private void SkipData(FileHeader fh, RarCrcBinaryReader reader) switch (StreamingMode) { case StreamingMode.Seekable: - { fh.DataStartPosition = reader.BaseStream.Position; reader.BaseStream.Position += fh.CompressedSize; } break; case StreamingMode.Streaming: - { //skip the data because it's useless? reader.BaseStream.Skip(fh.CompressedSize); diff --git a/src/SharpCompress/Common/Rar/RarCryptoWrapper.cs b/src/SharpCompress/Common/Rar/RarCryptoWrapper.cs index a9798609..5500af8d 100644 --- a/src/SharpCompress/Common/Rar/RarCryptoWrapper.cs +++ b/src/SharpCompress/Common/Rar/RarCryptoWrapper.cs @@ -70,11 +70,11 @@ public override void Write(byte[] buffer, int offset, int count) => protected override void Dispose(bool disposing) { - if (_rijndael != null) + if (disposing) { _rijndael.Dispose(); - _rijndael = null!; } + base.Dispose(disposing); } } diff --git a/src/SharpCompress/Common/Rar/RarEntry.cs b/src/SharpCompress/Common/Rar/RarEntry.cs index 30de1037..a064c2f8 100644 --- a/src/SharpCompress/Common/Rar/RarEntry.cs +++ b/src/SharpCompress/Common/Rar/RarEntry.cs @@ -25,7 +25,7 @@ public abstract class RarEntry : Entry /// /// The path of the file internal to the Rar Archive. /// - public override string Key => FileHeader.FileName; + public override string? Key => FileHeader.FileName; public override string? LinkTarget => null; diff --git a/src/SharpCompress/Common/Rar/RarVolume.cs b/src/SharpCompress/Common/Rar/RarVolume.cs index da42ff24..f3d55393 100644 --- a/src/SharpCompress/Common/Rar/RarVolume.cs +++ b/src/SharpCompress/Common/Rar/RarVolume.cs @@ -15,17 +15,14 @@ namespace SharpCompress.Common.Rar; public abstract class RarVolume : Volume { private readonly RarHeaderFactory _headerFactory; - internal int _maxCompressionAlgorithm; + private int _maxCompressionAlgorithm; - internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0) + internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index) : base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options); -#nullable disable - internal ArchiveHeader ArchiveHeader { get; private set; } + private ArchiveHeader? ArchiveHeader { get; set; } -#nullable enable - - internal StreamingMode Mode => _headerFactory.StreamingMode; + private StreamingMode Mode => _headerFactory.StreamingMode; internal abstract IEnumerable ReadFileParts(); @@ -39,19 +36,16 @@ internal IEnumerable GetVolumeFileParts() switch (header.HeaderType) { case HeaderType.Mark: - { lastMarkHeader = (MarkHeader)header; } break; case HeaderType.Archive: - { ArchiveHeader = (ArchiveHeader)header; } break; case HeaderType.File: - { var fh = (FileHeader)header; if (_maxCompressionAlgorithm < fh.CompressionAlgorithm) @@ -63,7 +57,6 @@ internal IEnumerable GetVolumeFileParts() } break; case HeaderType.Service: - { var fh = (FileHeader)header; if (fh.FileName == "CMT") @@ -105,7 +98,7 @@ public override bool IsFirstVolume get { EnsureArchiveHeaderLoaded(); - return ArchiveHeader.IsFirstVolume; + return ArchiveHeader?.IsFirstVolume ?? 
false; } } @@ -117,7 +110,7 @@ public override bool IsMultiVolume get { EnsureArchiveHeaderLoaded(); - return ArchiveHeader.IsVolume; + return ArchiveHeader?.IsVolume ?? false; } } @@ -130,7 +123,7 @@ public bool IsSolidArchive get { EnsureArchiveHeaderLoaded(); - return ArchiveHeader.IsSolid; + return ArchiveHeader?.IsSolid ?? false; } } diff --git a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs index ee6f6d08..4bac08ad 100644 --- a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs +++ b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs @@ -35,7 +35,7 @@ internal void Clear() _packSizes.Clear(); _packCrCs.Clear(); _folders.Clear(); - _numUnpackStreamsVector = null!; + _numUnpackStreamsVector = null; _files.Clear(); _packStreamStartPositions.Clear(); diff --git a/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs b/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs index 66170ba8..fbcb3d00 100644 --- a/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs +++ b/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs @@ -13,7 +13,7 @@ public class SevenZipEntry : Entry public override long Crc => FilePart.Header.Crc ?? 0; - public override string Key => FilePart.Header.Name; + public override string? Key => FilePart.Header.Name; public override string? LinkTarget => null; diff --git a/src/SharpCompress/Common/SevenZip/SevenZipFilePart.cs b/src/SharpCompress/Common/SevenZip/SevenZipFilePart.cs index aad191c9..fe221b53 100644 --- a/src/SharpCompress/Common/SevenZip/SevenZipFilePart.cs +++ b/src/SharpCompress/Common/SevenZip/SevenZipFilePart.cs @@ -41,7 +41,7 @@ internal override Stream GetCompressedStream() { if (!Header.HasStream) { - return null!; + throw new InvalidOperationException("File does not have a stream."); } var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider); @@ -73,34 +73,24 @@ public CompressionType CompressionType private const uint K_PPMD = 0x030401; private const uint K_B_ZIP2 = 0x040202; - internal CompressionType GetCompression() + private CompressionType GetCompression() { if (Header.IsDir) + { return CompressionType.None; + } - var coder = Folder!._coders.First(); - switch (coder._methodId._id) + var coder = Folder.NotNull()._coders.First(); + return coder._methodId._id switch { - case K_LZMA: - case K_LZMA2: - { - return CompressionType.LZMA; - } - case K_PPMD: - { - return CompressionType.PPMd; - } - case K_B_ZIP2: - { - return CompressionType.BZip2; - } - default: - throw new NotImplementedException(); - } + K_LZMA or K_LZMA2 => CompressionType.LZMA, + K_PPMD => CompressionType.PPMd, + K_B_ZIP2 => CompressionType.BZip2, + _ => throw new NotImplementedException() + }; } internal bool IsEncrypted => - Header.IsDir - ? 
false - : Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1; + !Header.IsDir + && Folder?._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1; } diff --git a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs index a59b74f6..68a72e8a 100644 --- a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs +++ b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Buffers.Binary; using System.IO; @@ -13,8 +11,8 @@ internal sealed class TarHeader public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding; - internal string Name { get; set; } - internal string LinkName { get; set; } + internal string? Name { get; set; } + internal string? LinkName { get; set; } internal long Mode { get; set; } internal long UserId { get; set; } @@ -22,7 +20,7 @@ internal sealed class TarHeader internal long Size { get; set; } internal DateTime LastModifiedTime { get; set; } internal EntryType EntryType { get; set; } - internal Stream PackedStream { get; set; } + internal Stream? PackedStream { get; set; } internal ArchiveEncoding ArchiveEncoding { get; } internal const int BLOCK_SIZE = 512; @@ -36,7 +34,9 @@ internal void Write(Stream output) WriteOctalBytes(0, buffer, 116, 8); // group ID //ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257); - var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name); + var nameByteCount = ArchiveEncoding + .GetEncoding() + .GetByteCount(Name.NotNull("Name is null")); if (nameByteCount > 100) { // Set mock filename and filetype to indicate the next block is the actual name of the file @@ -46,7 +46,7 @@ internal void Write(Stream output) } else { - WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100); + WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100); WriteOctalBytes(Size, buffer, 124, 12); var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds; WriteOctalBytes(time, buffer, 136, 12); @@ -77,7 +77,7 @@ internal void Write(Stream output) // // and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102. Name = ArchiveEncoding.Decode( - ArchiveEncoding.Encode(Name), + ArchiveEncoding.Encode(Name.NotNull("Name is null")), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1) ); @@ -87,7 +87,7 @@ internal void Write(Stream output) private void WriteLongFilenameHeader(Stream output) { - var nameBytes = ArchiveEncoding.Encode(Name); + var nameBytes = ArchiveEncoding.Encode(Name.NotNull("Name is null")); output.Write(nameBytes, 0, nameBytes.Length); // pad to multiple of BlockSize bytes, and make sure a terminating null is added @@ -323,5 +323,5 @@ internal static int RecalculateAltChecksum(byte[] buf) public long? DataStartPosition { get; set; } - public string Magic { get; set; } + public string? Magic { get; set; } } diff --git a/src/SharpCompress/Common/Tar/TarEntry.cs b/src/SharpCompress/Common/Tar/TarEntry.cs index 59743070..2597b837 100644 --- a/src/SharpCompress/Common/Tar/TarEntry.cs +++ b/src/SharpCompress/Common/Tar/TarEntry.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections.Generic; using System.IO; @@ -10,9 +8,9 @@ namespace SharpCompress.Common.Tar; public class TarEntry : Entry { - private readonly TarFilePart _filePart; + private readonly TarFilePart? 
_filePart; - internal TarEntry(TarFilePart filePart, CompressionType type) + internal TarEntry(TarFilePart? filePart, CompressionType type) { _filePart = filePart; CompressionType = type; @@ -22,15 +20,15 @@ internal TarEntry(TarFilePart filePart, CompressionType type) public override long Crc => 0; - public override string Key => _filePart.Header.Name; + public override string? Key => _filePart?.Header.Name; - public override string LinkTarget => _filePart.Header.LinkName; + public override string? LinkTarget => _filePart?.Header.LinkName; - public override long CompressedSize => _filePart.Header.Size; + public override long CompressedSize => _filePart?.Header.Size ?? 0; - public override long Size => _filePart.Header.Size; + public override long Size => _filePart?.Header.Size ?? 0; - public override DateTime? LastModifiedTime => _filePart.Header.LastModifiedTime; + public override DateTime? LastModifiedTime => _filePart?.Header.LastModifiedTime; public override DateTime? CreatedTime => null; @@ -40,17 +38,17 @@ internal TarEntry(TarFilePart filePart, CompressionType type) public override bool IsEncrypted => false; - public override bool IsDirectory => _filePart.Header.EntryType == EntryType.Directory; + public override bool IsDirectory => _filePart?.Header.EntryType == EntryType.Directory; public override bool IsSplitAfter => false; - public long Mode => _filePart.Header.Mode; + public long Mode => _filePart?.Header.Mode ?? 0; - public long UserID => _filePart.Header.UserId; + public long UserID => _filePart?.Header.UserId ?? 0; - public long GroupId => _filePart.Header.GroupId; + public long GroupId => _filePart?.Header.GroupId ?? 0; - internal override IEnumerable Parts => _filePart.AsEnumerable(); + internal override IEnumerable Parts => _filePart.Empty(); internal static IEnumerable GetEntries( StreamingMode mode, @@ -59,17 +57,17 @@ internal static IEnumerable GetEntries( ArchiveEncoding archiveEncoding ) { - foreach (var h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding)) + foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding)) { - if (h != null) + if (header != null) { if (mode == StreamingMode.Seekable) { - yield return new TarEntry(new TarFilePart(h, stream), compressionType); + yield return new TarEntry(new TarFilePart(header, stream), compressionType); } else { - yield return new TarEntry(new TarFilePart(h, null), compressionType); + yield return new TarEntry(new TarFilePart(header, null), compressionType); } } else diff --git a/src/SharpCompress/Common/Tar/TarFilePart.cs b/src/SharpCompress/Common/Tar/TarFilePart.cs index bfd2fe29..06516114 100644 --- a/src/SharpCompress/Common/Tar/TarFilePart.cs +++ b/src/SharpCompress/Common/Tar/TarFilePart.cs @@ -5,9 +5,9 @@ namespace SharpCompress.Common.Tar; internal sealed class TarFilePart : FilePart { - private readonly Stream _seekableStream; + private readonly Stream? _seekableStream; - internal TarFilePart(TarHeader header, Stream seekableStream) + internal TarFilePart(TarHeader header, Stream? seekableStream) : base(header.ArchiveEncoding) { _seekableStream = seekableStream; @@ -16,16 +16,16 @@ internal TarFilePart(TarHeader header, Stream seekableStream) internal TarHeader Header { get; } - internal override string FilePartName => Header.Name; + internal override string? 
FilePartName => Header?.Name; internal override Stream GetCompressedStream() { if (_seekableStream != null) { - _seekableStream.Position = Header.DataStartPosition!.Value; + _seekableStream.Position = Header.DataStartPosition ?? 0; return new TarReadOnlySubStream(_seekableStream, Header.Size); } - return Header.PackedStream; + return Header.PackedStream.NotNull(); } internal override Stream? GetRawStream() => null; diff --git a/src/SharpCompress/Common/Tar/TarHeaderFactory.cs b/src/SharpCompress/Common/Tar/TarHeaderFactory.cs index 85eec5b4..13813353 100644 --- a/src/SharpCompress/Common/Tar/TarHeaderFactory.cs +++ b/src/SharpCompress/Common/Tar/TarHeaderFactory.cs @@ -28,7 +28,6 @@ ArchiveEncoding archiveEncoding switch (mode) { case StreamingMode.Seekable: - { header.DataStartPosition = reader.BaseStream.Position; @@ -37,7 +36,6 @@ ArchiveEncoding archiveEncoding } break; case StreamingMode.Streaming: - { header.PackedStream = new TarReadOnlySubStream(stream, header.Size); } diff --git a/src/SharpCompress/Common/Volume.cs b/src/SharpCompress/Common/Volume.cs index 1f259257..7be161ec 100644 --- a/src/SharpCompress/Common/Volume.cs +++ b/src/SharpCompress/Common/Volume.cs @@ -9,11 +9,11 @@ public abstract class Volume : IVolume { private readonly Stream _actualStream; - internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0) + internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0) { Index = index; - ReaderOptions = readerOptions; - if (readerOptions.LeaveStreamOpen) + ReaderOptions = readerOptions ?? new ReaderOptions(); + if (ReaderOptions.LeaveStreamOpen) { stream = NonDisposingStream.Create(stream); } @@ -32,7 +32,7 @@ internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0) public virtual int Index { get; internal set; } - public string FileName => (_actualStream as FileStream)?.Name!; + public string? FileName => (_actualStream as FileStream)?.Name; /// /// RarArchive is part of a multi-part archive. diff --git a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs index edba3ebe..0570bf6d 100644 --- a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs +++ b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Buffers.Binary; using System.Collections.Generic; @@ -20,21 +18,21 @@ internal bool IsDirectory { get { - if (Name.EndsWith('/')) + if (Name?.EndsWith('/') ?? false) { return true; } //.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers - return CompressedSize == 0 && UncompressedSize == 0 && Name.EndsWith('\\'); + return CompressedSize == 0 && UncompressedSize == 0 && (Name?.EndsWith('\\') ?? false); } } - internal Stream PackedStream { get; set; } + internal Stream? PackedStream { get; set; } internal ArchiveEncoding ArchiveEncoding { get; } - internal string Name { get; set; } + internal string? Name { get; set; } internal HeaderFlags Flags { get; set; } @@ -48,7 +46,7 @@ internal bool IsDirectory internal List Extra { get; set; } - public string Password { get; set; } + public string? Password { get; set; } internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream) { @@ -65,7 +63,7 @@ internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStr return encryptionData; } - internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; } + internal WinzipAesEncryptionData? 
WinzipAesEncryptionData { get; set; } /// /// The last modified date as read from the Local or Central Directory header. @@ -119,7 +117,7 @@ protected void LoadExtra(byte[] extra) } } - internal ZipFilePart Part { get; set; } + internal ZipFilePart? Part { get; set; } internal bool IsZip64 => CompressedSize >= uint.MaxValue; } diff --git a/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs b/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs index b5d4b0fb..7b517857 100644 --- a/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs +++ b/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs @@ -103,7 +103,7 @@ private void Initialize(string password) internal byte[] StringToByteArray(string value) { - var a = _archiveEncoding.Password.GetBytes(value); + var a = _archiveEncoding.GetPasswordEncoding().GetBytes(value); return a; } diff --git a/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs b/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs index 63fdc933..e37008e5 100644 --- a/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs +++ b/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs @@ -42,16 +42,16 @@ private void LoadLocalHeader() protected override Stream CreateBaseStream() { - BaseStream.Position = Header.DataStartPosition!.Value; + BaseStream.Position = Header.DataStartPosition.NotNull(); if ( (Header.CompressedSize == 0) && FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) - && (_directoryEntryHeader?.HasData == true) - && (_directoryEntryHeader?.CompressedSize != 0) + && _directoryEntryHeader.HasData + && (_directoryEntryHeader.CompressedSize != 0) ) { - return new ReadOnlySubStream(BaseStream, _directoryEntryHeader!.CompressedSize); + return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize); } return BaseStream; diff --git a/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs b/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs index 1cd1f61f..97e44b6b 100644 --- a/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs +++ b/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs @@ -13,7 +13,7 @@ internal sealed class StreamingZipFilePart : ZipFilePart internal StreamingZipFilePart(ZipFileEntry header, Stream stream) : base(header, stream) { } - protected override Stream CreateBaseStream() => Header.PackedStream; + protected override Stream CreateBaseStream() => Header.PackedStream.NotNull(); internal override Stream GetCompressedStream() { diff --git a/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs b/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs index 68817d27..c0af5408 100644 --- a/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs +++ b/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs @@ -42,6 +42,10 @@ internal IEnumerable ReadStreamHeader(Stream stream) ) ) { + if (_lastEntryHeader.Part is null) + { + continue; + } reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation( ref rewindableStream ); diff --git a/src/SharpCompress/Common/Zip/ZipEntry.cs b/src/SharpCompress/Common/Zip/ZipEntry.cs index 80e33360..c2cecf39 100644 --- a/src/SharpCompress/Common/Zip/ZipEntry.cs +++ b/src/SharpCompress/Common/Zip/ZipEntry.cs @@ -1,5 +1,3 @@ -#nullable disable - using System; using System.Collections.Generic; using SharpCompress.Common.Zip.Headers; @@ -8,22 +6,23 @@ namespace SharpCompress.Common.Zip; public class ZipEntry : Entry { - private readonly ZipFilePart _filePart; + private readonly ZipFilePart? 
_filePart; - internal ZipEntry(ZipFilePart filePart) + internal ZipEntry(ZipFilePart? filePart) { - if (filePart != null) + if (filePart == null) { - _filePart = filePart; - LastModifiedTime = Utility.DosDateToDateTime( - filePart.Header.LastModifiedDate, - filePart.Header.LastModifiedTime - ); + return; } + _filePart = filePart; + LastModifiedTime = Utility.DosDateToDateTime( + filePart.Header.LastModifiedDate, + filePart.Header.LastModifiedTime + ); } public override CompressionType CompressionType => - _filePart.Header.CompressionMethod switch + _filePart?.Header.CompressionMethod switch { ZipCompressionMethod.BZip2 => CompressionType.BZip2, ZipCompressionMethod.Deflate => CompressionType.Deflate, @@ -35,15 +34,15 @@ internal ZipEntry(ZipFilePart filePart) _ => CompressionType.Unknown }; - public override long Crc => _filePart.Header.Crc; + public override long Crc => _filePart?.Header.Crc ?? 0; - public override string Key => _filePart.Header.Name; + public override string? Key => _filePart?.Header.Name; - public override string LinkTarget => null; + public override string? LinkTarget => null; - public override long CompressedSize => _filePart.Header.CompressedSize; + public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0; - public override long Size => _filePart.Header.UncompressedSize; + public override long Size => _filePart?.Header.UncompressedSize ?? 0; public override DateTime? LastModifiedTime { get; } @@ -54,11 +53,11 @@ internal ZipEntry(ZipFilePart filePart) public override DateTime? ArchivedTime => null; public override bool IsEncrypted => - FlagUtility.HasFlag(_filePart.Header.Flags, HeaderFlags.Encrypted); + FlagUtility.HasFlag(_filePart?.Header.Flags ?? HeaderFlags.None, HeaderFlags.Encrypted); - public override bool IsDirectory => _filePart.Header.IsDirectory; + public override bool IsDirectory => _filePart?.Header.IsDirectory ?? false; public override bool IsSplitAfter => false; - internal override IEnumerable Parts => _filePart.AsEnumerable(); + internal override IEnumerable Parts => _filePart.Empty(); } diff --git a/src/SharpCompress/Common/Zip/ZipFilePart.cs b/src/SharpCompress/Common/Zip/ZipFilePart.cs index f8a0e8a8..e8516150 100644 --- a/src/SharpCompress/Common/Zip/ZipFilePart.cs +++ b/src/SharpCompress/Common/Zip/ZipFilePart.cs @@ -29,7 +29,7 @@ internal ZipFilePart(ZipFileEntry header, Stream stream) internal Stream BaseStream { get; } internal ZipFileEntry Header { get; set; } - internal override string FilePartName => Header.Name; + internal override string? 
FilePartName => Header.Name; internal override Stream GetCompressedStream() { diff --git a/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs b/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs index 628cbb20..40fe178f 100644 --- a/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs +++ b/src/SharpCompress/Common/Zip/ZipHeaderFactory.cs @@ -55,7 +55,13 @@ ArchiveEncoding archiveEncoding } case POST_DATA_DESCRIPTOR: { - if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor)) + if ( + FlagUtility.HasFlag( + _lastEntryHeader.NotNull().Flags, + HeaderFlags.UsePostDataDescriptor + ) + && _lastEntryHeader != null + ) { _lastEntryHeader.Crc = reader.ReadUInt32(); _lastEntryHeader.CompressedSize = zip64 diff --git a/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs b/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs index 8e884c97..ecec4f29 100644 --- a/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs +++ b/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs @@ -20,7 +20,8 @@ internal sealed class AesDecoderStream : DecoderStream2 public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit) { - if (pass.CryptoGetTextPassword() == null) + var password = pass.CryptoGetTextPassword(); + if (password == null) { throw new SharpCompress.Common.CryptographicException( "Encrypted 7Zip archive has no password specified." @@ -37,8 +38,8 @@ public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long Init(info, out var numCyclesPower, out var salt, out var seed); - var password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword()); - var key = InitKey(numCyclesPower, salt, password); + var passwordBytes = Encoding.Unicode.GetBytes(password); + var key = InitKey(numCyclesPower, salt, passwordBytes); if (key == null) { throw new InvalidOperationException("Initialized with null key"); diff --git a/src/SharpCompress/Compressors/LZMA/LZipStream.cs b/src/SharpCompress/Compressors/LZMA/LZipStream.cs index 4b63a621..5e987214 100644 --- a/src/SharpCompress/Compressors/LZMA/LZipStream.cs +++ b/src/SharpCompress/Compressors/LZMA/LZipStream.cs @@ -63,18 +63,18 @@ public void Finish() var crc32Stream = (Crc32Stream)_stream; crc32Stream.WrappedStream.Dispose(); crc32Stream.Dispose(); - var compressedCount = _countingWritableSubStream!.Count; + var compressedCount = _countingWritableSubStream.NotNull().Count; Span intBuf = stackalloc byte[8]; BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc); - _countingWritableSubStream.Write(intBuf.Slice(0, 4)); + _countingWritableSubStream?.Write(intBuf.Slice(0, 4)); BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount); - _countingWritableSubStream.Write(intBuf); + _countingWritableSubStream?.Write(intBuf); //total with headers BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20); - _countingWritableSubStream.Write(intBuf); + _countingWritableSubStream?.Write(intBuf); } _finished = true; } diff --git a/src/SharpCompress/Compressors/LZMA/Utilites/IPasswordProvider.cs b/src/SharpCompress/Compressors/LZMA/Utilites/IPasswordProvider.cs index 2f0eb904..d7d787d2 100644 --- a/src/SharpCompress/Compressors/LZMA/Utilites/IPasswordProvider.cs +++ b/src/SharpCompress/Compressors/LZMA/Utilites/IPasswordProvider.cs @@ -2,5 +2,5 @@ namespace SharpCompress.Compressors.LZMA.Utilites; internal interface IPasswordProvider { - string CryptoGetTextPassword(); + string? 
CryptoGetTextPassword(); } diff --git a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_cpp.cs b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_cpp.cs index 62567888..143e3d31 100644 --- a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_cpp.cs +++ b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack50_cpp.cs @@ -530,7 +530,6 @@ private byte[] ApplyFilter(byte[] __d, uint DataSize, UnpackFilter Flt) { case FILTER_E8: case FILTER_E8E9: - { var FileOffset = (uint)WrittenFileSize; @@ -569,7 +568,6 @@ private byte[] ApplyFilter(byte[] __d, uint DataSize, UnpackFilter Flt) } return SrcData; case FILTER_ARM: - { var FileOffset = (uint)WrittenFileSize; // DataSize is unsigned, so we use "CurPos+3" and not "DataSize-3" diff --git a/src/SharpCompress/Compressors/Rar/VM/RarVM.cs b/src/SharpCompress/Compressors/Rar/VM/RarVM.cs index 185b24d4..f758b8a4 100644 --- a/src/SharpCompress/Compressors/Rar/VM/RarVM.cs +++ b/src/SharpCompress/Compressors/Rar/VM/RarVM.cs @@ -228,7 +228,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_CMP: - { var value1 = (VMFlags)GetValue(cmd.IsByteMode, Mem, op1); var result = value1 - GetValue(cmd.IsByteMode, Mem, op2); @@ -247,7 +246,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_CMPB: - { var value1 = (VMFlags)GetValue(true, Mem, op1); var result = value1 - GetValue(true, Mem, op2); @@ -265,7 +263,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_CMPD: - { var value1 = (VMFlags)GetValue(false, Mem, op1); var result = value1 - GetValue(false, Mem, op2); @@ -283,7 +280,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_ADD: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); var result = (int)( @@ -351,7 +347,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_SUB: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); var result = (int)( @@ -411,7 +406,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_INC: - { var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFffL + 1L)); if (cmd.IsByteMode) @@ -440,7 +434,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_DEC: - { var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFff - 1)); SetValue(cmd.IsByteMode, Mem, op1, result); @@ -463,7 +456,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) continue; case VMCommands.VM_XOR: - { var result = GetValue(cmd.IsByteMode, Mem, op1) ^ GetValue(cmd.IsByteMode, Mem, op2); @@ -475,7 +467,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_AND: - { var result = GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2); @@ -487,7 +478,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_OR: - { var result = GetValue(cmd.IsByteMode, Mem, op1) | GetValue(cmd.IsByteMode, Mem, op2); @@ -499,7 +489,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_TEST: - { var result = GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2); @@ -578,7 +567,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_SHL: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); var value2 = GetValue(cmd.IsByteMode, Mem, op2); @@ -596,7 +584,6 @@ private bool ExecuteCode(List preparedCode, int 
cmdCount) break; case VMCommands.VM_SHR: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); var value2 = GetValue(cmd.IsByteMode, Mem, op2); @@ -610,7 +597,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_SAR: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); var value2 = GetValue(cmd.IsByteMode, Mem, op2); @@ -624,7 +610,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_NEG: - { var result = -GetValue(cmd.IsByteMode, Mem, op1); flags = (VMFlags)( @@ -645,7 +630,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_PUSHA: - { for (int i = 0, SP = R[7] - 4; i < regCount; i++, SP -= 4) { @@ -656,7 +640,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_POPA: - { for (int i = 0, SP = R[7]; i < regCount; i++, SP += 4) { @@ -684,7 +667,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_XCHG: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); SetValue(cmd.IsByteMode, Mem, op1, GetValue(cmd.IsByteMode, Mem, op2)); @@ -693,7 +675,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_MUL: - { var result = (int)( ( @@ -707,7 +688,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_DIV: - { var divider = GetValue(cmd.IsByteMode, Mem, op2); if (divider != 0) @@ -719,7 +699,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_ADC: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); var FC = (int)(flags & VMFlags.VM_FC); @@ -749,7 +728,6 @@ private bool ExecuteCode(List preparedCode, int cmdCount) break; case VMCommands.VM_SBB: - { var value1 = GetValue(cmd.IsByteMode, Mem, op1); var FC = (int)(flags & VMFlags.VM_FC); @@ -1156,7 +1134,6 @@ private void ExecuteStandardFilter(VMStandardFilters filterType) { case VMStandardFilters.VMSF_E8: case VMStandardFilters.VMSF_E8E9: - { var dataSize = R[4]; long fileOffset = R[6] & unchecked((int)0xFFffFFff); @@ -1211,7 +1188,6 @@ private void ExecuteStandardFilter(VMStandardFilters filterType) break; case VMStandardFilters.VMSF_ITANIUM: - { var dataSize = R[4]; long fileOffset = R[6] & unchecked((int)0xFFffFFff); @@ -1269,7 +1245,6 @@ private void ExecuteStandardFilter(VMStandardFilters filterType) break; case VMStandardFilters.VMSF_DELTA: - { var dataSize = R[4] & unchecked((int)0xFFffFFff); var channels = R[0] & unchecked((int)0xFFffFFff); @@ -1300,7 +1275,6 @@ private void ExecuteStandardFilter(VMStandardFilters filterType) break; case VMStandardFilters.VMSF_RGB: - { // byte *SrcData=Mem,*DestData=SrcData+DataSize; int dataSize = R[4], @@ -1366,7 +1340,6 @@ private void ExecuteStandardFilter(VMStandardFilters filterType) break; case VMStandardFilters.VMSF_AUDIO: - { int dataSize = R[4], channels = R[0]; @@ -1497,7 +1470,6 @@ private void ExecuteStandardFilter(VMStandardFilters filterType) break; case VMStandardFilters.VMSF_UPCASE: - { int dataSize = R[4], srcPos = 0, diff --git a/src/SharpCompress/IO/SourceStream.cs b/src/SharpCompress/IO/SourceStream.cs index 26ce0af1..46c0a3f6 100644 --- a/src/SharpCompress/IO/SourceStream.cs +++ b/src/SharpCompress/IO/SourceStream.cs @@ -11,8 +11,8 @@ public class SourceStream : Stream private long _prevSize; private readonly List _files; private readonly List _streams; - private readonly Func _getFilePart; - private readonly Func _getStreamPart; + private readonly Func? _getFilePart; + private readonly Func? 
_getStreamPart; private int _stream; public SourceStream(FileInfo file, Func getPart, ReaderOptions options) @@ -38,8 +38,8 @@ ReaderOptions options if (!IsFileMode) { _streams.Add(stream!); - _getStreamPart = getStreamPart!; - _getFilePart = _ => null!; + _getStreamPart = getStreamPart; + _getFilePart = _ => null; if (stream is FileStream fileStream) { _files.Add(new FileInfo(fileStream.Name)); @@ -49,8 +49,8 @@ ReaderOptions options { _files.Add(file!); _streams.Add(_files[0].OpenRead()); - _getFilePart = getFilePart!; - _getStreamPart = _ => null!; + _getFilePart = getFilePart; + _getStreamPart = _ => null; } _stream = 0; _prevSize = 0; @@ -78,7 +78,7 @@ public bool LoadStream(int index) //ensure all parts to id are loaded { if (IsFileMode) { - var f = _getFilePart(_streams.Count); + var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count); if (f == null) { _stream = _streams.Count - 1; @@ -90,7 +90,7 @@ public bool LoadStream(int index) //ensure all parts to id are loaded } else { - var s = _getStreamPart(_streams.Count); + var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count); if (s == null) { _stream = _streams.Count - 1; diff --git a/src/SharpCompress/NotNullExtensions.cs b/src/SharpCompress/NotNullExtensions.cs new file mode 100644 index 00000000..88e95c41 --- /dev/null +++ b/src/SharpCompress/NotNullExtensions.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Runtime.CompilerServices; + +namespace SharpCompress; + +public static class NotNullExtensions +{ + public static IEnumerable Empty(this IEnumerable? source) => + source ?? Enumerable.Empty(); + + public static IEnumerable Empty(this T? source) + { + if (source is null) + { + return Enumerable.Empty(); + } + return source.AsEnumerable(); + } + +#if NETFRAMEWORK || NETSTANDARD + public static T NotNull(this T? obj, string? message = null) + where T : class + { + if (obj is null) + { + throw new ArgumentNullException(message ?? "Value is null"); + } + return obj; + } + + public static T NotNull(this T? obj, string? message = null) + where T : struct + { + if (obj is null) + { + throw new ArgumentNullException(message ?? "Value is null"); + } + return obj.Value; + } +#else + + public static T NotNull( + [NotNull] this T? obj, + [CallerArgumentExpression(nameof(obj))] string? paramName = null + ) + where T : class + { + ArgumentNullException.ThrowIfNull(obj, paramName); + return obj; + } + + public static T NotNull( + [NotNull] this T? obj, + [CallerArgumentExpression(nameof(obj))] string? paramName = null + ) + where T : struct + { + ArgumentNullException.ThrowIfNull(obj, paramName); + return obj.Value; + } +#endif +} diff --git a/src/SharpCompress/Readers/AbstractReader.cs b/src/SharpCompress/Readers/AbstractReader.cs index 53069414..b36acd53 100644 --- a/src/SharpCompress/Readers/AbstractReader.cs +++ b/src/SharpCompress/Readers/AbstractReader.cs @@ -13,9 +13,9 @@ public abstract class AbstractReader : IReader, IReaderExtracti where TEntry : Entry where TVolume : Volume { - private bool completed; - private IEnumerator? entriesForCurrentReadStream; - private bool wroteCurrentEntry; + private bool _completed; + private IEnumerator? _entriesForCurrentReadStream; + private bool _wroteCurrentEntry; public event EventHandler>? 
EntryExtractionProgress; @@ -35,18 +35,18 @@ internal AbstractReader(ReaderOptions options, ArchiveType archiveType) /// /// Current volume that the current entry resides in /// - public abstract TVolume Volume { get; } + public abstract TVolume? Volume { get; } /// /// Current file entry /// - public TEntry Entry => entriesForCurrentReadStream!.Current; + public TEntry Entry => _entriesForCurrentReadStream.NotNull().Current; #region IDisposable Members public void Dispose() { - entriesForCurrentReadStream?.Dispose(); + _entriesForCurrentReadStream?.Dispose(); Volume?.Dispose(); } @@ -61,7 +61,7 @@ public void Dispose() /// public void Cancel() { - if (!completed) + if (!_completed) { Cancelled = true; } @@ -69,7 +69,7 @@ public void Cancel() public bool MoveToNextEntry() { - if (completed) + if (_completed) { return false; } @@ -77,27 +77,27 @@ public bool MoveToNextEntry() { throw new ReaderCancelledException("Reader has been cancelled."); } - if (entriesForCurrentReadStream is null) + if (_entriesForCurrentReadStream is null) { return LoadStreamForReading(RequestInitialStream()); } - if (!wroteCurrentEntry) + if (!_wroteCurrentEntry) { SkipEntry(); } - wroteCurrentEntry = false; + _wroteCurrentEntry = false; if (NextEntryForCurrentStream()) { return true; } - completed = true; + _completed = true; return false; } protected bool LoadStreamForReading(Stream stream) { - entriesForCurrentReadStream?.Dispose(); - if ((stream is null) || (!stream.CanRead)) + _entriesForCurrentReadStream?.Dispose(); + if (stream is null || !stream.CanRead) { throw new MultipartStreamRequiredException( "File is split into multiple archives: '" @@ -105,13 +105,15 @@ protected bool LoadStreamForReading(Stream stream) + "'. A new readable stream is required. Use Cancel if it was intended." 
); } - entriesForCurrentReadStream = GetEntries(stream).GetEnumerator(); - return entriesForCurrentReadStream.MoveNext(); + _entriesForCurrentReadStream = GetEntries(stream).GetEnumerator(); + return _entriesForCurrentReadStream.MoveNext(); } - protected virtual Stream RequestInitialStream() => Volume.Stream; + protected virtual Stream RequestInitialStream() => + Volume.NotNull("Volume isn't loaded.").Stream; - internal virtual bool NextEntryForCurrentStream() => entriesForCurrentReadStream!.MoveNext(); + internal virtual bool NextEntryForCurrentStream() => + _entriesForCurrentReadStream.NotNull().MoveNext(); protected abstract IEnumerable GetEntries(Stream stream); @@ -149,7 +151,7 @@ private void Skip() public void WriteEntryTo(Stream writableStream) { - if (wroteCurrentEntry) + if (_wroteCurrentEntry) { throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once."); } @@ -166,7 +168,7 @@ public void WriteEntryTo(Stream writableStream) } Write(writableStream); - wroteCurrentEntry = true; + _wroteCurrentEntry = true; } internal void Write(Stream writeStream) @@ -178,12 +180,12 @@ internal void Write(Stream writeStream) public EntryStream OpenEntryStream() { - if (wroteCurrentEntry) + if (_wroteCurrentEntry) { throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once."); } var stream = GetEntryStream(); - wroteCurrentEntry = true; + _wroteCurrentEntry = true; return stream; } diff --git a/src/SharpCompress/Readers/GZip/GZipReader.cs b/src/SharpCompress/Readers/GZip/GZipReader.cs index 27394cc7..73bc4a9d 100644 --- a/src/SharpCompress/Readers/GZip/GZipReader.cs +++ b/src/SharpCompress/Readers/GZip/GZipReader.cs @@ -7,8 +7,8 @@ namespace SharpCompress.Readers.GZip; public class GZipReader : AbstractReader { - internal GZipReader(Stream stream, ReaderOptions options) - : base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options); + private GZipReader(Stream stream, ReaderOptions options) + : base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options, 0); public override GZipVolume Volume { get; } diff --git a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs index 76899e0c..556ca9f5 100644 --- a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs +++ b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs @@ -17,7 +17,7 @@ internal class MultiVolumeRarReader : RarReader internal MultiVolumeRarReader(IEnumerable streams, ReaderOptions options) : base(options) => this.streams = streams.GetEnumerator(); - internal override void ValidateArchive(RarVolume archive) { } + protected override void ValidateArchive(RarVolume archive) { } protected override Stream RequestInitialStream() { diff --git a/src/SharpCompress/Readers/Rar/RarReader.cs b/src/SharpCompress/Readers/Rar/RarReader.cs index a46d2852..2398cfac 100644 --- a/src/SharpCompress/Readers/Rar/RarReader.cs +++ b/src/SharpCompress/Readers/Rar/RarReader.cs @@ -14,16 +14,16 @@ namespace SharpCompress.Readers.Rar; public abstract class RarReader : AbstractReader { private RarVolume? 
volume; - internal Lazy UnpackV2017 { get; } = + private Lazy UnpackV2017 { get; } = new(() => new Compressors.Rar.UnpackV2017.Unpack()); - internal Lazy UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack()); + private Lazy UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack()); internal RarReader(ReaderOptions options) : base(options, ArchiveType.Rar) { } - internal abstract void ValidateArchive(RarVolume archive); + protected abstract void ValidateArchive(RarVolume archive); - public override RarVolume Volume => volume!; + public override RarVolume? Volume => volume; /// /// Opens a RarReader for Non-seeking usage with a single volume @@ -51,7 +51,7 @@ public static RarReader Open(IEnumerable streams, ReaderOptions? options protected override IEnumerable GetEntries(Stream stream) { - volume = new RarReaderVolume(stream, Options); + volume = new RarReaderVolume(stream, Options, 0); foreach (var fp in volume.ReadFileParts()) { ValidateArchive(volume); diff --git a/src/SharpCompress/Readers/Rar/RarReaderVolume.cs b/src/SharpCompress/Readers/Rar/RarReaderVolume.cs index 85beb897..d614260a 100644 --- a/src/SharpCompress/Readers/Rar/RarReaderVolume.cs +++ b/src/SharpCompress/Readers/Rar/RarReaderVolume.cs @@ -8,7 +8,7 @@ namespace SharpCompress.Readers.Rar; public class RarReaderVolume : RarVolume { - internal RarReaderVolume(Stream stream, ReaderOptions options, int index = 0) + internal RarReaderVolume(Stream stream, ReaderOptions options, int index) : base(StreamingMode.Streaming, stream, options, index) { } internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) => diff --git a/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs b/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs index 768aa115..165c92f7 100644 --- a/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs +++ b/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs @@ -11,7 +11,7 @@ internal class SingleVolumeRarReader : RarReader internal SingleVolumeRarReader(Stream stream, ReaderOptions options) : base(options) => this.stream = stream; - internal override void ValidateArchive(RarVolume archive) + protected override void ValidateArchive(RarVolume archive) { if (archive.IsMultiVolume) { diff --git a/src/SharpCompress/Readers/Zip/ZipReader.cs b/src/SharpCompress/Readers/Zip/ZipReader.cs index b00690f0..cda3f3f8 100644 --- a/src/SharpCompress/Readers/Zip/ZipReader.cs +++ b/src/SharpCompress/Readers/Zip/ZipReader.cs @@ -69,7 +69,6 @@ protected override IEnumerable GetEntries(Stream stream) switch (h.ZipHeaderType) { case ZipHeaderType.LocalEntry: - { yield return new ZipEntry( new StreamingZipFilePart((LocalEntryHeader)h, stream) diff --git a/src/SharpCompress/Writers/AbstractWriter.cs b/src/SharpCompress/Writers/AbstractWriter.cs index 209c49f7..1820c986 100644 --- a/src/SharpCompress/Writers/AbstractWriter.cs +++ b/src/SharpCompress/Writers/AbstractWriter.cs @@ -1,28 +1,24 @@ -#nullable disable - -using System; +using System; using System.IO; using SharpCompress.Common; namespace SharpCompress.Writers; -public abstract class AbstractWriter : IWriter +#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. 
+public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptions) : IWriter { private bool _isDisposed; - protected AbstractWriter(ArchiveType type, WriterOptions writerOptions) - { - WriterType = type; - WriterOptions = writerOptions; - } + //always initializes the stream - protected void InitalizeStream(Stream stream) => OutputStream = stream; + protected void InitializeStream(Stream stream) => OutputStream = stream; protected Stream OutputStream { get; private set; } +#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable. - public ArchiveType WriterType { get; } + public ArchiveType WriterType { get; } = type; - protected WriterOptions WriterOptions { get; } + protected WriterOptions WriterOptions { get; } = writerOptions; public abstract void Write(string filename, Stream source, DateTime? modificationTime); diff --git a/src/SharpCompress/Writers/GZip/GZipWriter.cs b/src/SharpCompress/Writers/GZip/GZipWriter.cs index 5c990a7d..f30fdb55 100644 --- a/src/SharpCompress/Writers/GZip/GZipWriter.cs +++ b/src/SharpCompress/Writers/GZip/GZipWriter.cs @@ -18,7 +18,7 @@ public GZipWriter(Stream destination, GZipWriterOptions? options = null) { destination = NonDisposingStream.Create(destination); } - InitalizeStream( + InitializeStream( new GZipStream( destination, CompressionMode.Compress, diff --git a/src/SharpCompress/Writers/Tar/TarWriter.cs b/src/SharpCompress/Writers/Tar/TarWriter.cs index 90242857..a3e572f1 100644 --- a/src/SharpCompress/Writers/Tar/TarWriter.cs +++ b/src/SharpCompress/Writers/Tar/TarWriter.cs @@ -32,19 +32,16 @@ public TarWriter(Stream destination, TarWriterOptions options) case CompressionType.None: break; case CompressionType.BZip2: - { destination = new BZip2Stream(destination, CompressionMode.Compress, false); } break; case CompressionType.GZip: - { destination = new GZipStream(destination, CompressionMode.Compress); } break; case CompressionType.LZip: - { destination = new LZipStream(destination, CompressionMode.Compress); } @@ -56,7 +53,7 @@ public TarWriter(Stream destination, TarWriterOptions options) ); } } - InitalizeStream(destination); + InitializeStream(destination); } public override void Write(string filename, Stream source, DateTime? modificationTime) => diff --git a/src/SharpCompress/Writers/Zip/ZipWriter.cs b/src/SharpCompress/Writers/Zip/ZipWriter.cs index 9dc094e2..1fb66a2b 100644 --- a/src/SharpCompress/Writers/Zip/ZipWriter.cs +++ b/src/SharpCompress/Writers/Zip/ZipWriter.cs @@ -42,14 +42,14 @@ public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions) { destination = NonDisposingStream.Create(destination); } - InitalizeStream(destination); + InitializeStream(destination); } private PpmdProperties PpmdProperties => ppmdProps ??= new PpmdProperties(); protected override void Dispose(bool isDisposing) { - if (isDisposing) + if (isDisposing && OutputStream is not null) { ulong size = 0; foreach (var entry in entries) @@ -114,7 +114,7 @@ public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options) streamPosition += headersize; return new ZipWritingStream( this, - OutputStream, + OutputStream.NotNull(), entry, compression, options.DeflateCompressionLevel ?? compressionLevel @@ -419,13 +419,14 @@ protected override void Dispose(bool disposing) return; } + var countingCount = counting?.Count ?? 
0; entry.Crc = (uint)crc.Crc32Result; - entry.Compressed = counting!.Count; + entry.Compressed = countingCount; entry.Decompressed = decompressed; var zip64 = entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue; - var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count; + var compressedvalue = zip64 ? uint.MaxValue : (uint)countingCount; var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed; if (originalStream.CanSeek) @@ -433,7 +434,7 @@ protected override void Dispose(bool disposing) originalStream.Position = (long)(entry.HeaderOffset + 6); originalStream.WriteByte(0); - if (counting.Count == 0 && entry.Decompressed == 0) + if (countingCount == 0 && entry.Decompressed == 0) { // set compression to STORED for zero byte files (no compression data) originalStream.Position = (long)(entry.HeaderOffset + 8); @@ -520,11 +521,12 @@ public override void Write(byte[] buffer, int offset, int count) // if we can prevent the writes from happening if (entry.Zip64HeaderOffset == 0) { + var countingCount = counting?.Count ?? 0; // Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it if ( limitsExceeded || ((decompressed + (uint)count) > uint.MaxValue) - || (counting!.Count + (uint)count) > uint.MaxValue + || (countingCount + (uint)count) > uint.MaxValue ) { throw new NotSupportedException( @@ -539,8 +541,9 @@ public override void Write(byte[] buffer, int offset, int count) if (entry.Zip64HeaderOffset == 0) { + var countingCount = counting?.Count ?? 0; // Post-check, this is accurate - if ((decompressed > uint.MaxValue) || counting!.Count > uint.MaxValue) + if ((decompressed > uint.MaxValue) || countingCount > uint.MaxValue) { // We have written the data, so the archive is now broken // Throwing the exception here, allows us to avoid diff --git a/tests/SharpCompress.Test/ArchiveTests.cs b/tests/SharpCompress.Test/ArchiveTests.cs index 41135e7d..7d94b29d 100644 --- a/tests/SharpCompress.Test/ArchiveTests.cs +++ b/tests/SharpCompress.Test/ArchiveTests.cs @@ -240,12 +240,10 @@ protected void ArchiveFileSkip( ReaderOptions? 
readerOptions = null ) { -#if !NETFRAMEWORK - if (!OperatingSystem.IsWindows()) + if (!Environment.OSVersion.IsWindows()) { fileOrder = fileOrder.Replace('\\', '/'); } -#endif var expected = new Stack(fileOrder.Split(' ')); testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); using var archive = ArchiveFactory.Open(testArchive, readerOptions); diff --git a/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs b/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs index 5b497d9f..80a9a9a1 100644 --- a/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs +++ b/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs @@ -19,7 +19,7 @@ public void GZip_Archive_Generic() using (var archive = ArchiveFactory.Open(stream)) { var entry = archive.Entries.First(); - entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key)); + entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull())); var size = entry.Size; var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar")); @@ -41,7 +41,7 @@ public void GZip_Archive() using (var archive = GZipArchive.Open(stream)) { var entry = archive.Entries.First(); - entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key)); + entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull())); var size = entry.Size; var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar")); @@ -94,6 +94,7 @@ public void GZip_Archive_Multiple_Reads() using (var entryStream = archiveEntry.OpenEntryStream()) { var result = TarArchive.IsTarFile(entryStream); + Assert.True(result); } Assert.Equal(size, tarStream.Length); using (var entryStream = archiveEntry.OpenEntryStream()) diff --git a/tests/SharpCompress.Test/OperatingSystemExtensions.cs b/tests/SharpCompress.Test/OperatingSystemExtensions.cs new file mode 100644 index 00000000..c8ca768e --- /dev/null +++ b/tests/SharpCompress.Test/OperatingSystemExtensions.cs @@ -0,0 +1,11 @@ +using System; + +namespace SharpCompress.Test; + +public static class OperatingSystemExtensions +{ + public static bool IsWindows(this OperatingSystem os) => + os.Platform == PlatformID.Win32NT + || os.Platform == PlatformID.Win32Windows + || os.Platform == PlatformID.Win32S; +} diff --git a/tests/SharpCompress.Test/Rar/RarReaderTests.cs b/tests/SharpCompress.Test/Rar/RarReaderTests.cs index e2b100f6..6ba605a5 100644 --- a/tests/SharpCompress.Test/Rar/RarReaderTests.cs +++ b/tests/SharpCompress.Test/Rar/RarReaderTests.cs @@ -209,7 +209,7 @@ private void DoRar_Entry_Stream(string filename) { Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType); using var entryStream = reader.OpenEntryStream(); - var file = Path.GetFileName(reader.Entry.Key); + var file = Path.GetFileName(reader.Entry.Key).NotNull(); var folder = Path.GetDirectoryName(reader.Entry.Key) ?? 
throw new ArgumentNullException(); @@ -293,7 +293,7 @@ private void DoRar_Solid_Skip_Reader(string filename) using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true }); while (reader.MoveToNextEntry()) { - if (reader.Entry.Key.Contains("jpg")) + if (reader.Entry.Key.NotNull().Contains("jpg")) { Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType); reader.WriteEntryToDirectory( @@ -316,7 +316,7 @@ private void DoRar_Reader_Skip(string filename) using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true }); while (reader.MoveToNextEntry()) { - if (reader.Entry.Key.Contains("jpg")) + if (reader.Entry.Key.NotNull().Contains("jpg")) { Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType); reader.WriteEntryToDirectory( diff --git a/tests/SharpCompress.Test/ReaderTests.cs b/tests/SharpCompress.Test/ReaderTests.cs index b62dd4d2..7ea1d7b7 100644 --- a/tests/SharpCompress.Test/ReaderTests.cs +++ b/tests/SharpCompress.Test/ReaderTests.cs @@ -77,12 +77,10 @@ protected void Iterate( ReaderOptions? options = null ) { -#if !NETFRAMEWORK - if (!OperatingSystem.IsWindows()) + if (!Environment.OSVersion.IsWindows()) { fileOrder = fileOrder.Replace('\\', '/'); } -#endif var expected = new Stack(fileOrder.Split(' ')); testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive); diff --git a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs index a0c9d8c9..32d978a2 100644 --- a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs +++ b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs @@ -182,7 +182,7 @@ public void SevenZipArchive_Tar_PathRead() using (var archive = SevenZipArchive.Open(stream)) { var entry = archive.Entries.First(); - entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key)); + entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull())); var size = entry.Size; var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "7Zip.Tar.tar")); diff --git a/tests/SharpCompress.Test/Tar/TarArchiveTests.cs b/tests/SharpCompress.Test/Tar/TarArchiveTests.cs index 1ee6b783..bbf96c00 100644 --- a/tests/SharpCompress.Test/Tar/TarArchiveTests.cs +++ b/tests/SharpCompress.Test/Tar/TarArchiveTests.cs @@ -195,7 +195,7 @@ public void Tar_Random_Write_Remove() using (var archive = TarArchive.Open(unmodified)) { var entry = archive.Entries.Single(x => - x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) ); archive.RemoveEntry(entry); archive.SaveTo(scratchPath, CompressionType.None); diff --git a/tests/SharpCompress.Test/Tar/TarReaderTests.cs b/tests/SharpCompress.Test/Tar/TarReaderTests.cs index 1ee4ae8d..bb10db98 100644 --- a/tests/SharpCompress.Test/Tar/TarReaderTests.cs +++ b/tests/SharpCompress.Test/Tar/TarReaderTests.cs @@ -79,7 +79,7 @@ public void Tar_BZip2_Entry_Stream() { Directory.CreateDirectory(destdir); } - var destinationFileName = Path.Combine(destdir, file); + var destinationFileName = Path.Combine(destdir, file.NotNull()); using var fs = File.OpenWrite(destinationFileName); entryStream.TransferTo(fs); @@ -105,7 +105,7 @@ public void Tar_LongNamesWithLongNameExtension() { if (!reader.Entry.IsDirectory) { - filePaths.Add(reader.Entry.Key); + filePaths.Add(reader.Entry.Key.NotNull("Entry Key is null")); } } } @@ -135,7 +135,7 @@ public void Tar_BZip2_Skip_Entry_Stream() Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType); using var 
entryStream = reader.OpenEntryStream(); entryStream.SkipEntry(); - names.Add(reader.Entry.Key); + names.Add(reader.Entry.Key.NotNull()); } } Assert.Equal(3, names.Count); @@ -224,7 +224,7 @@ public void Tar_GZip_With_Symlink_Entries() { if (reader.Entry.LinkTarget != null) { - var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key); + var path = Path.Combine(SCRATCH_FILES_PATH, reader.Entry.Key.NotNull()); var link = new Mono.Unix.UnixSymbolicLinkInfo(path); if (link.HasContents) { diff --git a/tests/SharpCompress.Test/TestBase.cs b/tests/SharpCompress.Test/TestBase.cs index ad39b2d7..184e6442 100644 --- a/tests/SharpCompress.Test/TestBase.cs +++ b/tests/SharpCompress.Test/TestBase.cs @@ -216,8 +216,8 @@ protected void CompareArchivesByPath(string file1, string file2, Encoding? encod while (archive1.MoveToNextEntry()) { Assert.True(archive2.MoveToNextEntry()); - archive1Entries.Add(archive1.Entry.Key); - archive2Entries.Add(archive2.Entry.Key); + archive1Entries.Add(archive1.Entry.Key.NotNull()); + archive2Entries.Add(archive2.Entry.Key.NotNull()); } Assert.False(archive2.MoveToNextEntry()); } diff --git a/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs b/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs index 1948b26a..2e12b1ff 100644 --- a/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs +++ b/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs @@ -193,7 +193,7 @@ public void Zip_Random_Write_Remove() using (var archive = ZipArchive.Open(unmodified)) { var entry = archive.Entries.Single(x => - x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) ); archive.RemoveEntry(entry); @@ -249,16 +249,18 @@ public void Zip_Removal_Poly() var scratchPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip"); using var vfs = (ZipArchive)ArchiveFactory.Open(scratchPath); - var e = vfs.Entries.First(v => v.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase)); + var e = vfs.Entries.First(v => + v.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + ); vfs.RemoveEntry(e); Assert.Null( vfs.Entries.FirstOrDefault(v => - v.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + v.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) ) ); Assert.Null( ((IArchive)vfs).Entries.FirstOrDefault(v => - v.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + v.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) ) ); } @@ -394,12 +396,12 @@ var file in Directory.EnumerateFiles( archive.AddAllFromDirectory(SCRATCH_FILES_PATH); archive.RemoveEntry( archive.Entries.Single(x => - x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) ) ); Assert.Null( archive.Entries.FirstOrDefault(x => - x.Key.EndsWith("jpg", StringComparison.OrdinalIgnoreCase) + x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase) ) ); }
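
With `#nullable disable` removed from the entry types, `IEntry.Key` surfaces as `string?` to consumers, which is why the tests above switch to `entry.Key.NotNull()`. Below is a minimal extraction sketch under that assumption — the archive path and output directory are hypothetical, and the calls (`ArchiveFactory.Open`, `WriteToFile`, `NotNull`) are used only as they already appear in the tests in this diff:

```csharp
using System;
using System.IO;
using System.Linq;
using SharpCompress;              // NotNull() extension (NotNullExtensions above)
using SharpCompress.Archives;     // ArchiveFactory, IArchiveEntry

// Hypothetical inputs for illustration only.
var archivePath = "example.zip";
var outputDir = "extracted";

using var archive = ArchiveFactory.Open(archivePath);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
    // Key is now declared as string?; NotNull() throws ArgumentNullException
    // (with the failing expression as the parameter name on newer targets)
    // instead of letting a null name flow into Path.Combine.
    var key = entry.Key.NotNull();

    var destination = Path.Combine(outputDir, key);
    Directory.CreateDirectory(Path.GetDirectoryName(destination) ?? outputDir);
    entry.WriteToFile(destination);
}
```

Coalescing (`entry.Key ?? "unnamed"`) works just as well when a missing entry name should not be fatal.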
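
For reference, a small standalone sketch of how the new `NotNull()` helper behaves for the reference-type and nullable-value-type overloads: on .NET Framework/.NET Standard it throws `ArgumentNullException` using the optional message, while on newer targets it defers to `ArgumentNullException.ThrowIfNull` with the caller expression (or the supplied string) as the parameter name. The values below are made up for illustration:

```csharp
using System;
using SharpCompress;

// Reference-type overload: non-null values pass straight through.
string? key = "images/photo.jpg";
Console.WriteLine(key.NotNull());            // images/photo.jpg

// Nullable-value-type overload: unwraps long? into long, mirroring the
// Header.DataStartPosition.NotNull() call in SeekableZipFilePart above.
long? dataStart = 1024;
Console.WriteLine(dataStart.NotNull() + 4);  // 1028

// A null value surfaces as ArgumentNullException at the call site rather than
// a NullReferenceException somewhere deeper in the call chain.
string? missing = null;
try
{
    _ = missing.NotNull("no entry name");
}
catch (ArgumentNullException ex)
{
    Console.WriteLine(ex.Message);
}
```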