This repository has been archived by the owner on Feb 4, 2022. It is now read-only.

Commit

Update
aianlinb committed Dec 1, 2020
1 parent 8ab1067 commit ae2bbff
Showing 5 changed files with 193 additions and 82 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -3,6 +3,7 @@
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore

oo2core_8_win64.dll
*.zip

# User-specific files
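The newly ignored oo2core_8_win64.dll is the Oodle codec DLL that BundleContainer.cs drives through P/Invoke, presumably so each user supplies their own copy rather than committing it. As a rough, self-contained sketch of what such declarations could look like (argument layout inferred from the OodleLZ_Compress/OodleLZ_Decompress call sites in the diff below; the enum values and the real declarations in LibBundle may differ):

    using System;
    using System.Runtime.InteropServices;

    // Illustrative only: parameter shapes are guessed from the call sites in BundleContainer.cs.
    internal static class OodleSketch
    {
        // Stand-ins for the library's ENCODE_TYPES / COMPRESSTION_LEVEL enums (values assumed).
        internal enum Codec : uint { LEVIATHAN = 13 }
        internal enum Level : uint { Normal = 4 }

        [DllImport("oo2core_8_win64.dll")]
        internal static extern int OodleLZ_Compress(
            Codec codec, byte[] src, long srcLen, byte[] dst, Level level,
            IntPtr options, long dictBase, long lrm, IntPtr scratch, long scratchSize);

        [DllImport("oo2core_8_win64.dll")]
        internal static extern int OodleLZ_Decompress(
            byte[] src, long srcLen, byte[] dst, long dstLen,
            int fuzzSafe, int checkCrc, int verbosity,
            IntPtr dstBase, long dstBaseSize,
            IntPtr callback, IntPtr callbackData,
            IntPtr decoderMem, long decoderMemSize, int threadPhase);
    }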
173 changes: 134 additions & 39 deletions LibBundle/BundleContainer.cs
@@ -1,5 +1,10 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

namespace LibBundle
{
@@ -42,6 +47,7 @@ public enum COMPRESSTION_LEVEL

public string path;
public long offset;
public COMPRESSTION_LEVEL Compression_Level = COMPRESSTION_LEVEL.Normal;

public int uncompressed_size;
public int compressed_size;
@@ -117,48 +123,138 @@ public virtual MemoryStream Read(BinaryReader br)
var chunks = new int[entry_count];
for (int i = 0; i < entry_count; i++)
chunks[i] = br.ReadInt32();


var compressed = new byte[entry_count][];
for (int i = 0; i < entry_count; i++)
compressed[i] = br.ReadBytes(chunks[i]);

Parallel.For(0, entry_count, i => {
var size = (i + 1 == entry_count) ? uncompressed_size - (chunk_size * (entry_count - 1)) : chunk_size; // isLast ?
var toSave = new byte[size + 64];
OodleLZ_Decompress(compressed[i], compressed[i].Length, toSave, size, 0, 0, 0, IntPtr.Zero, 0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, 0, 3);
compressed[i] = toSave.Take(size).ToArray();
});

var data = new MemoryStream(uncompressed_size);
for (int i = 0; i < entry_count; i++)
data.Write(compressed[i]);

return data;
}

public virtual byte[] AppendAndSave(Stream newData, string path = null)
{
if (path == null)
path = this.path;
offset = 0;
return AppendAndSave(newData, File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite));
}

public virtual byte[] AppendAndSave(Stream newData, Stream originalData)
{
originalData.Seek(offset + 60, SeekOrigin.Begin);
var OldChunkCompressedSizes = new byte[(entry_count - 1) * 4];
originalData.Read(OldChunkCompressedSizes);

var lastCunkCompressedSize = originalData.ReadByte() | originalData.ReadByte() << 8 | originalData.ReadByte() << 16 | originalData.ReadByte() << 24; // ReadInt32

var lastCunkDecompressedSize = uncompressed_size - (chunk_size * (entry_count - 1));

uncompressed_size = (int)(size_decompressed += newData.Length);
entry_count = uncompressed_size / chunk_size;
if (uncompressed_size % chunk_size != 0) entry_count++;
head_size = entry_count * 4 + 48;

var msToSave = new MemoryStream();
var bw = new BinaryWriter(msToSave);

msToSave.Seek(60 + (entry_count * 4), SeekOrigin.Begin);
var o = new byte[compressed_size - lastCunkCompressedSize];
originalData.Read(o);
bw.Write(o);

var lastChunkCompressedData = new byte[lastCunkCompressedSize];
originalData.Read(lastChunkCompressedData);
var lastCunkDecompressedData = new byte[lastCunkDecompressedSize + 64];
OodleLZ_Decompress(lastChunkCompressedData, lastCunkCompressedSize, lastCunkDecompressedData, lastCunkDecompressedSize, 0, 0, 0, IntPtr.Zero, 0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, 0, 3);

newData.Seek(0, SeekOrigin.Begin);
compressed_size -= lastCunkCompressedSize;
var NewChunkCompressedSizes = new int[entry_count - (OldChunkCompressedSizes.Length / 4)];

var FirstNewDataChunk = new byte[Math.Min(chunk_size - lastCunkDecompressedSize, newData.Length)];
newData.Read(FirstNewDataChunk);
FirstNewDataChunk = lastCunkDecompressedData.Take(lastCunkDecompressedSize).Concat(FirstNewDataChunk).ToArray(); // Decompressed
var CompressedChunk = new byte[FirstNewDataChunk.Length + 548];
var CompressedLength = OodleLZ_Compress(encoder, FirstNewDataChunk, FirstNewDataChunk.Length, CompressedChunk, Compression_Level, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
compressed_size += NewChunkCompressedSizes[0] = CompressedLength;
bw.Write(CompressedChunk, 0, CompressedLength); // Compressed
var byteArrays = new byte[NewChunkCompressedSizes.Length][];
for (int i = 1; i < NewChunkCompressedSizes.Length; i++)
{
var b = br.ReadBytes(chunks[i]);
int size = (i + 1 == entry_count) ? uncompressed_size - (chunk_size * (entry_count - 1)) : chunk_size; // isLast ?
var toSave = new byte[size + 64];
OodleLZ_Decompress(b, b.Length, toSave, size, 0, 0, 0, IntPtr.Zero, 0, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero, 0, 3);
data.Write(toSave, 0, size);
var size = (i + 1 == NewChunkCompressedSizes.Length) ? uncompressed_size - (chunk_size * (entry_count - 1)) : chunk_size;
newData.Read(byteArrays[i] = new byte[size]);
}
return data;
Parallel.For(1, NewChunkCompressedSizes.Length, i => {
var by = new byte[byteArrays[i].Length + 548];
var l = OodleLZ_Compress(encoder, byteArrays[i], byteArrays[i].Length, by, Compression_Level, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
byteArrays[i] = by;
Interlocked.Add(ref compressed_size, NewChunkCompressedSizes[i] = l);
});
for (int i = 0; i < NewChunkCompressedSizes.Length - 1; i++)
bw.Write(byteArrays[i], 0, NewChunkCompressedSizes[i]);

size_compressed = compressed_size;

msToSave.Seek(60, SeekOrigin.Begin);
bw.Write(OldChunkCompressedSizes);
for (int i = 0; i < NewChunkCompressedSizes.Length; i++)
bw.Write(NewChunkCompressedSizes[i]);

msToSave.Seek(0, SeekOrigin.Begin);
bw.Write(uncompressed_size);
bw.Write(compressed_size);
bw.Write(head_size);
bw.Write((uint)encoder);
bw.Write(unknown);
bw.Write(size_decompressed);
bw.Write(size_compressed);
bw.Write(entry_count);
bw.Write(chunk_size);
bw.Write(unknown3);
bw.Write(unknown4);
bw.Write(unknown5);
bw.Write(unknown6);

bw.Flush();
var result = msToSave.ToArray();
bw.Close();
return result;
}

//Packing
public virtual void Save(Stream ms, string path)
public virtual void Save(Stream newData, string path)
{
var bw = new BinaryWriter(File.Open(path, FileMode.Open, FileAccess.Write, FileShare.ReadWrite));

uncompressed_size = (int)(size_decompressed = ms.Length);
uncompressed_size = (int)(size_decompressed = newData.Length);
entry_count = uncompressed_size / chunk_size;
if (uncompressed_size % chunk_size != 0) entry_count++;
head_size = entry_count * 4 + 48;

bw.BaseStream.Seek(60 + (entry_count * 4), SeekOrigin.Begin);
ms.Position = 0;
newData.Seek(0, SeekOrigin.Begin);
compressed_size = 0;
var chunks = new int[entry_count];
for (int i = 0; i < entry_count - 1; i++)
for (int i = 0; i < entry_count; i++)
{
var b = new byte[chunk_size];
ms.Read(b, 0, chunk_size);
var b = new byte[i + 1 == entry_count ? uncompressed_size - (entry_count - 1) * chunk_size : chunk_size];
newData.Read(b, 0, b.Length);
var by = new byte[b.Length + 548];
var l = OodleLZ_Compress(ENCODE_TYPES.LEVIATHAN, b, b.Length, by, COMPRESSTION_LEVEL.Normal, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
var l = OodleLZ_Compress(encoder, b, b.Length, by, Compression_Level, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
compressed_size += chunks[i] = l;
bw.Write(by, 0, l);
}
var b2 = new byte[ms.Length - (entry_count - 1) * chunk_size];
ms.Read(b2, 0, b2.Length);
var by2 = new byte[b2.Length + 548];
var l2 = OodleLZ_Compress(ENCODE_TYPES.LEVIATHAN, b2, b2.Length, by2, COMPRESSTION_LEVEL.Normal, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
compressed_size += chunks[entry_count - 1] = l2;
bw.Write(by2, 0, l2);
size_compressed = compressed_size;

bw.BaseStream.Seek(60, SeekOrigin.Begin);
@@ -184,42 +280,41 @@ public virtual void Save(Stream ms, string path)
bw.Close();
}
//Packing
public virtual byte[] Save(Stream ms)
public virtual byte[] Save(Stream newData)
{
var msToSave = new MemoryStream();
var bw = new BinaryWriter(msToSave);

uncompressed_size = (int)(size_decompressed = ms.Length);
uncompressed_size = (int)(size_decompressed = newData.Length);
entry_count = uncompressed_size / chunk_size;
if (uncompressed_size % chunk_size != 0) entry_count++;
head_size = entry_count * 4 + 48;

bw.BaseStream.Seek(60 + (entry_count * 4), SeekOrigin.Begin);
ms.Position = 0;
msToSave.Seek(60 + (entry_count * 4), SeekOrigin.Begin);
newData.Seek(0, SeekOrigin.Begin);
compressed_size = 0;
var chunks = new int[entry_count];
for (int i = 0; i < entry_count - 1; i++)
var byteArrays = new byte[entry_count][];
for (int i = 0; i < entry_count; i++)
{
var b = new byte[chunk_size];
ms.Read(b, 0, chunk_size);
var by = new byte[b.Length + 548];
var l = OodleLZ_Compress(ENCODE_TYPES.LEVIATHAN, b, b.Length, by, COMPRESSTION_LEVEL.Normal, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
compressed_size += chunks[i] = l;
bw.Write(by, 0, l);
var b = new byte[i + 1 == entry_count ? uncompressed_size - (entry_count - 1) * chunk_size : chunk_size];
newData.Read(byteArrays[i] = b);
}
var b2 = new byte[ms.Length - (entry_count - 1) * chunk_size];
ms.Read(b2, 0, b2.Length);
var by2 = new byte[b2.Length + 548];
var l2 = OodleLZ_Compress(ENCODE_TYPES.LEVIATHAN, b2, b2.Length, by2, COMPRESSTION_LEVEL.Normal, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
compressed_size += chunks[entry_count - 1] = l2;
bw.Write(by2, 0, l2);
Parallel.For(0, entry_count, i => {
var by = new byte[byteArrays[i].Length + 548];
var l = OodleLZ_Compress(encoder, byteArrays[i], byteArrays[i].Length, by, Compression_Level, IntPtr.Zero, 0, 0, IntPtr.Zero, 0);
byteArrays[i] = by;
Interlocked.Add(ref compressed_size, chunks[i] = l);
});
size_compressed = compressed_size;
for (int i = 0; i < entry_count; i++)
bw.Write(byteArrays[i], 0, chunks[i]);

bw.BaseStream.Seek(60, SeekOrigin.Begin);
msToSave.Seek(60, SeekOrigin.Begin);
for (int i = 0; i < entry_count; i++)
bw.Write(chunks[i]);

bw.BaseStream.Seek(0, SeekOrigin.Begin);
msToSave.Seek(0, SeekOrigin.Begin);
bw.Write(uncompressed_size);
bw.Write(compressed_size);
bw.Write(head_size);
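Every method in the hunk above relies on the same chunk layout: the uncompressed payload is cut into chunk_size-byte blocks, the block count is rounded up, and only the final block is shorter (the uncompressed_size - (chunk_size * (entry_count - 1)) expression that recurs throughout). A minimal sketch of that calculation, using hypothetical helper and parameter names:

    // Hypothetical helper mirroring the chunk arithmetic used in Read/Save/AppendAndSave.
    static (int entryCount, int lastChunkSize) ChunkLayout(int uncompressedSize, int chunkSize)
    {
        // Round the chunk count up, as the diff does with its modulo check.
        int entryCount = uncompressedSize / chunkSize;
        if (uncompressedSize % chunkSize != 0) entryCount++;

        // Every chunk is chunkSize bytes except the last one.
        int lastChunkSize = uncompressedSize - chunkSize * (entryCount - 1);
        return (entryCount, lastChunkSize);
    }

For example, assuming the typical 256 KiB (262,144-byte) chunk_size, a 1,000,000-byte payload yields entryCount = 4 and lastChunkSize = 213,568 bytes, and head_size becomes 4 * 4 + 48 = 64.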
21 changes: 10 additions & 11 deletions LibBundle/IndexContainer.cs
@@ -41,8 +41,10 @@ public IndexContainer(BinaryReader br)
var f = new FileRecord(databr);
Files[i] = f;
FindFiles[f.Hash] = f;
f.bundleRecord = Bundles[f.BundleIndex];
Bundles[f.BundleIndex].Files.Add(f);
var b = Bundles[f.BundleIndex];
f.bundleRecord = b;
b.Files.Add(f);
if (f.Offset >= b.validSize) b.validSize = f.Offset + f.Size;
}

int directoryCount = databr.ReadInt32();
@@ -103,8 +105,8 @@ public virtual void Save(string path)
bw.Write(Bundles.Length);
foreach (var b in Bundles)
{
bw.Write(b.nameLength);
bw.Write(Encoding.UTF8.GetBytes(b.Name), 0, b.nameLength);
bw.Write(b.NameLength);
bw.Write(Encoding.UTF8.GetBytes(b.Name), 0, b.NameLength);
bw.Write(b.UncompressedSize);
}
bw.Write(Files.Length);
@@ -135,8 +137,8 @@ public virtual byte[] Save()
bw.Write(Bundles.Length);
foreach (var b in Bundles)
{
bw.Write(b.nameLength);
bw.Write(Encoding.UTF8.GetBytes(b.Name), 0, b.nameLength);
bw.Write(b.NameLength);
bw.Write(Encoding.UTF8.GetBytes(b.Name), 0, b.NameLength);
bw.Write(b.UncompressedSize);
}
bw.Write(Files.Length);
@@ -178,11 +180,8 @@ public BundleRecord GetSmallestBundle(IList<BundleRecord> Bundles = null)

public static ulong FNV1a64Hash(string str)
{
if (str.EndsWith("/"))
{
str.TrimEnd(new char[] { '/' });
str += "++";
}
if (str.EndsWith('/'))
str = str.TrimEnd(new char[] { '/' }) + "++";
else
str = str.ToLower() + "++";

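The FNV1a64Hash change above only rewrites the path normalization so the trimmed result is actually used: a directory path (trailing '/') is trimmed and gets "++" appended while keeping its case, and any other path is lower-cased before "++". For reference, a sketch of the whole routine with the standard FNV-1a 64-bit constants; the normalization follows the diff, but the hash loop is the textbook algorithm rather than a verbatim copy of LibBundle's implementation:

    using System.Text;

    static ulong Fnv1a64(string str)
    {
        // Normalization as in the updated FNV1a64Hash.
        if (str.EndsWith('/'))
            str = str.TrimEnd('/') + "++";
        else
            str = str.ToLower() + "++";

        // Standard 64-bit FNV-1a: offset basis 0xcbf29ce484222325, prime 0x100000001b3.
        ulong hash = 0xcbf29ce484222325;
        foreach (byte b in Encoding.UTF8.GetBytes(str))
        {
            hash ^= b;
            hash *= 0x100000001b3;
        }
        return hash;
    }

The old code called str.TrimEnd(...) without assigning the result, so directory paths were hashed with the trailing slash still in place; the rewritten version applies the trim before appending "++".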
12 changes: 9 additions & 3 deletions LibBundle/LibBundle.csproj
@@ -1,17 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>net5.0</TargetFramework>
<Copyright>Copyright © 2020 aianlinb</Copyright>
<AssemblyVersion>2.2.0.2</AssemblyVersion>
<AssemblyVersion>2.3.0.0</AssemblyVersion>
<OutputType>Library</OutputType>
</PropertyGroup>

<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<DefineConstants>DEBUG;TRACE</DefineConstants>
<PlatformTarget>x64</PlatformTarget>
<Optimize>true</Optimize>
<Optimize>false</Optimize>
<OutputPath>..\Release</OutputPath>
<DebugType>full</DebugType>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>

<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>

<ItemGroup>
