Skip to content

Commit

Permalink
added stream support
Browse files Browse the repository at this point in the history
  • Loading branch information
SirJosh3917 committed Jul 4, 2018
1 parent 171170b commit f47608f
Show file tree
Hide file tree
Showing 4 changed files with 100 additions and 61 deletions.
63 changes: 2 additions & 61 deletions StringDB.Tester/Program.cs
Original file line number Diff line number Diff line change
@@ -1,73 +1,14 @@
using Newtonsoft.Json;

using StringDB.Reader;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

namespace StringDB.Tester {

internal class Program {

/// <summary>
/// Manual smoke test for StringDB: exercises insertion (including a stream
/// value), enumeration, overwriting, duplicate indices, and cleaning a
/// database into a second file.
/// </summary>
private static void Main(string[] args) {
using (var db = Database.FromFile("string.db")) {
foreach (var i in db) // loop over every item and say the index
Console.WriteLine(i.Index);

Console.WriteLine(GetSizeOfObject(db));

// Keep the stream open for the database's lifetime: the value may be
// consumed lazily, so it is only safe to dispose once the db is done.
using (var fs = System.IO.File.OpenRead("file-to-insert.txt")) {
db.Insert("very-cool", fs);

db.Insert("test_key", "test_value"); // insert a single item named "test_key"

db.InsertRange(new KeyValuePair<string, string>[] { // insert multiple items
new KeyValuePair<string, string>("test1", "value1"),
new KeyValuePair<string, string>("test2", "value2"),
new KeyValuePair<string, string>("test3", "value3"),
new KeyValuePair<string, string>("test4", "value4"),
new KeyValuePair<string, string>("test5", "value5"),
});

foreach (var i in db) // loop over every item in the DB again and say the index
Console.WriteLine(i.ToString());

var testKey = db.GetByIndex("test_key"); // get test_key

Console.WriteLine(testKey.Value); // say the value of test_key

db.OverwriteValue(testKey, "new_value"); // change the value

Console.WriteLine(testKey.Value); // say the new value

db.OverwriteValue(testKey, "newest_value"); // change the value again

// insert another test_key

db.Insert("test_key", "i'm hidden behind the other test_key!");

// foreach loop

foreach (var i in db)
Console.WriteLine(i.Index);

// will print out the first test_key and the other test key

foreach (var i in db.GetMultipleByIndex("test_key")) //let's get every single test_key
Console.WriteLine(i.Value); //write out the value

// now by doing so many tiny inserts we can save a little space if we clean it

using (var cleaneddb = Database.FromFile("cleaned-string.db")) {
cleaneddb.CleanFrom(db);
}
}
}

// let's see how big the normal database is compared to the cleaned one

Console.WriteLine("unclean: " + new System.IO.FileInfo("string.db").Length + " bytes");
Console.WriteLine("clean: " + new System.IO.FileInfo("cleaned-string.db").Length + " bytes");

Console.ReadLine();
}
}

Expand Down
3 changes: 3 additions & 0 deletions StringDB/Reader/Enumerability/ReaderPair.cs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,9 @@ internal ReaderPair(PartDataPair dp, IRawReader rawReader) {
/// <summary>The raw value as a byte array; read once and cached thereafter.
/// Never null — an absent value yields an empty array.</summary>
public byte[] ByteArrayValue => this._byteValueCache ?? (this._byteValueCache = (this._dp.ReadData(this._rawReader) ?? new byte[0] { }));

/// <summary>A read-only stream over the raw value, for consuming it without
/// buffering the whole value into memory.
/// NOTE(review): unlike <c>ByteArrayValue</c> this is not cached — each access
/// appears to create a new stream; confirm that is intended.</summary>
public System.IO.Stream StreamValue => this._rawReader.GetStreamOfDataAt(this._dp.DataPosition);

/// <summary>The index (key) of this entry, decoded to a string once and cached.</summary>
public string Index => this._strIndexCache ?? (this._strIndexCache = this._dp.Index.GetString());

Expand Down
26 changes: 26 additions & 0 deletions StringDB/Reader/RawReader.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ internal interface IRawReader {

/// <summary>Reads the part following <paramref name="previous"/> in the file.
/// NOTE(review): presumably returns a terminator/null at the end — confirm
/// against the implementation.</summary>
IPart ReadOn(IPart previous);

/// <summary>Gets a read-only stream over the data value stored at position
/// <paramref name="p"/>, avoiding loading the whole value into memory.</summary>
Stream GetStreamOfDataAt(long p);

/// <summary>Reads the data value at position <paramref name="p"/> into a byte array.</summary>
byte[] ReadDataValueAt(long p);

/// <summary>Reads the length, in bytes, of the data value at position <paramref name="p"/>.</summary>
long ReadDataValueLengthAt(long p);
Expand Down Expand Up @@ -108,6 +110,30 @@ public long ReadDataValueLengthAt(long p) {
#endif
}

/// <summary>Returns a read-only stream over the raw value stored at position
/// <paramref name="p"/>, without copying the value into memory
/// (contrast with ReadDataValueAt).</summary>
public Stream GetStreamOfDataAt(long p) {
#if THREAD_SAFE
lock(_lock) {
#endif
// Total byte length of the stored value.
var len = ReadDataValueLengthAt(p);

// p + 1 presumably skips a leading marker byte — TODO confirm against the writer.
var newp = p + 1;

// Skip the variable-width length prefix so the fragment starts at the data
// itself. NOTE(review): these boundary comparisons (`<` vs `<=`) must mirror
// the writer's encoding exactly — a value whose length is exactly
// byte.MaxValue takes the ushort branch here; verify the writer agrees.
if (len < byte.MaxValue)
newp += sizeof(byte);
else if (len < ushort.MaxValue)
newp += sizeof(ushort);
else if (len < uint.MaxValue)
newp += sizeof(uint);
// NOTE(review): this condition is always true for a long, so this branch is
// effectively a plain `else`.
else if ((ulong)len < ulong.MaxValue)
newp += sizeof(ulong);


// The fragment shares the underlying stream; concurrent readers will contend
// on its position unless THREAD_SAFE is defined.
return new StreamFragment(this._stream, newp, len);
#if THREAD_SAFE
}
#endif
}

public byte[] ReadDataValueAt(long p) {
#if THREAD_SAFE
lock(_lock) {
Expand Down
69 changes: 69 additions & 0 deletions StringDB/Reader/StreamFragment.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace StringDB.Reader
{
/// <summary>A read-only view over a fixed window of another stream.
/// Reads are confined to [pos, pos + lenAfterPos) of the underlying stream;
/// Position is reported relative to the start of the window.
/// Not thread-safe: each Read seeks the shared underlying stream.</summary>
public class StreamFragment : Stream {

/// <param name="main">The underlying stream; the fragment does not take ownership of it.</param>
/// <param name="pos">Absolute position in <paramref name="main"/> where the window starts.</param>
/// <param name="lenAfterPos">Length of the window in bytes.</param>
public StreamFragment(Stream main, long pos, long lenAfterPos) {
this.Length = lenAfterPos;
this._originalPos = pos;
this._pos = pos;
this._s = main;
}

private readonly Stream _s;

public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => false;

/// <summary>Length of the window, fixed at construction.</summary>
public override long Length { get; }

// Absolute start of the window in the underlying stream.
private long _originalPos { get; }

// Absolute position in the underlying stream of the next read.
private long _pos;

/// <summary>Position relative to the window start. Out-of-range assignments
/// (below 0 or beyond Length) are silently ignored; Length itself is a valid
/// position and means end-of-fragment.</summary>
public override long Position {
get => this._pos - this._originalPos;
set {
if (value >= 0 && value <= this.Length)
this._pos = this._originalPos + value;
}
}

public override void Flush() { }

/// <summary>Reads up to <paramref name="count"/> bytes, clamped to the window.
/// Returns 0 at end-of-fragment (per the Stream contract — never negative).</summary>
public override int Read(byte[] buffer, int offset, int count) {
var relative = this._pos - this._originalPos;
if (relative < 0 || relative >= this.Length || count <= 0)
return 0;

// Never read past the end of the window.
var toRead = (int)Math.Min(count, this.Length - relative);

this._s.Seek(this._pos, SeekOrigin.Begin);
var read = this._s.Read(buffer, offset, toRead);

// Advance by what was actually read, not what was requested — the
// underlying stream may return fewer bytes than asked for.
this._pos += read;
return read;
}

/// <summary>Moves Position; out-of-range targets are silently ignored
/// (same clamping as the Position setter). Returns the resulting Position.</summary>
public override long Seek(long offset, SeekOrigin origin) {
switch (origin) {
case SeekOrigin.Begin: this.Position = offset; break;
case SeekOrigin.Current: this.Position += offset; break;
case SeekOrigin.End: this.Position = this.Length + offset; break;
}

return this.Position;
}

// The fragment is read-only and fixed-length: per the Stream contract these
// throw NotSupportedException (CanWrite is false).
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
}
}

0 comments on commit f47608f

Please sign in to comment.