Initial Commit

master
Harald Wolff-Thobaben 2020-11-18 00:24:25 +01:00
commit 2ea02f8633
67 changed files with 7081 additions and 0 deletions

41
.gitignore vendored 100644
View File

@ -0,0 +1,41 @@
# Autosave files
*~
# build
[Oo]bj/
[Bb]in/
packages/
TestResults/
# globs
Makefile.in
*.DS_Store
*.sln.cache
*.suo
*.cache
*.pidb
*.userprefs
*.usertasks
config.log
config.make
config.status
aclocal.m4
install-sh
autom4te.cache/
*.user
*.tar.gz
tarballs/
test-results/
Thumbs.db
.vs/
# Mac bundle stuff
*.dmg
*.app
# resharper
*_Resharper.*
*.Resharper
# dotCover
*.dotCover

40
ObjectCache.cs 100644
View File

@ -0,0 +1,40 @@
using ln.collections;
using System;
using System.Collections.Generic;
using System.Text;
namespace ln.objects
{
/// <summary>
/// Weak bidirectional cache mapping live objects to their UIDs and back.
/// Both directions hold weak references, so entries vanish automatically
/// once the cached object is garbage collected.
/// </summary>
class ObjectCache
{
    WeakKeyReferenceDictionary<object, Guid> uidByObject = new WeakKeyReferenceDictionary<object, Guid>();
    WeakValueDictionary<Guid, object> objectByUid = new WeakValueDictionary<Guid, object>();

    public ObjectCache()
    { }

    /// <summary>Resolves a UID to its cached object; throws when not cached.</summary>
    public object this[Guid uid] => objectByUid[uid];

    /// <summary>Resolves a cached object to its UID; throws when not cached.</summary>
    public Guid this[object o] => uidByObject[o];

    public bool TryGetUID(object o, out Guid uid) => uidByObject.TryGetValue(o, out uid);

    public bool TryGetObject(Guid uid, out object o) => objectByUid.TryGetValue(uid, out o);

    /// <summary>Registers the pair in both directions.</summary>
    public void Add(Guid uid, object o)
    {
        uidByObject.Add(o, uid);
        objectByUid.Add(uid, o);
    }

    /// <summary>Removes the pair identified by its UID from both directions.</summary>
    public void Remove(Guid uid)
    {
        uidByObject.Remove(objectByUid[uid]);
        objectByUid.Remove(uid);
    }

    /// <summary>Removes the pair identified by its object from both directions.</summary>
    public void Remove(object o)
    {
        objectByUid.Remove(uidByObject[o]);
        uidByObject.Remove(o);
    }
}
}

351
ObjectStore.cs 100644
View File

@ -0,0 +1,351 @@
using ln.logging;
using ln.objects.index;
using ln.objects.serialization;
using ln.objects.storage;
using ln.type;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using ln.objects.serialization.binary;
using ln.collections;
using System;
namespace ln.objects
{
/// <summary>
/// Persistent object store: serializes object graphs into a single binary file
/// keyed by Guid UIDs, maintains a per-type UID cache and per-type secondary
/// indexes, and weakly caches loaded instances so identical UIDs resolve to
/// identical objects.
/// </summary>
public class ObjectStore : IDisposable
{
    // When true, verbose diagnostics are logged for reference lookups and save decisions.
    public static bool DEBUG;

    // Reserved UID under which the serialized type cache is persisted on Close().
    public static Guid TYPECACHE_GUID = new Guid("00000000-0000-0000-0001-000000000001");
    // Upper bound of the UID range reserved for internal bookkeeping objects.
    public static Guid SPECIAL_GUID_LIMIT = new Guid("00000000-0000-0000-0002-000000000000");

    /// <summary>Path of the underlying binary object file.</summary>
    public String FileName => objectFile.FileName;

    // Factories producing (de)serializers bound to this store.
    public Factory<Serializer, ObjectStore> SerializerFactory { get; }
    public Factory<Deserializer, ObjectStore> DeserializerFactory { get; }

    Deserializer defaultDeserializer;            // created in Open(); resolves stored references via LookupObjectByReference
    BinaryObjectFile objectFile;                 // backing storage; null after Close()
    ObjectCache objectCache;                     // weak bidirectional object<->UID cache
    BTreeValueSet<Type, Guid> objectTypeCache;   // maps each stored type to the UIDs of its instances
    Dictionary<Type, IndexLeaf> indeces = new Dictionary<Type, IndexLeaf>();   // per-type index trees
    SaveTransaction currentTransaction;          // transaction installed by TransactionalAction (NOTE(review): written but never read here)

    /// <summary>Opens a store on the given file using the default binary (de)serializers.</summary>
    public ObjectStore(String filename)
        :this(filename,() => new BinarySerializer(),() => new BinaryDeserializer())
    {
    }

    /// <summary>Convenience overload wrapping parameterless factories.</summary>
    public ObjectStore(string filename, Factory<Serializer> serializerFactory, Factory<Deserializer> deserializerFactory)
        : this(filename, (o) => serializerFactory(), (o) => deserializerFactory())
    {
    }

    public ObjectStore(string filename, Factory<Serializer, ObjectStore> serializerFactory, Factory<Deserializer, ObjectStore> deserializerFactory)
    {
        SerializerFactory = serializerFactory;
        DeserializerFactory = deserializerFactory;
        objectFile = new BinaryObjectFile(filename);
        objectCache = new ObjectCache();
    }

    /// <summary>
    /// Opens the backing file and builds the type cache — either from the
    /// persisted cache object or, when that fails, by scanning every stored object.
    /// </summary>
    public void Open()
    {
        lock (this)   // NOTE(review): lock(this) used throughout; a private gate object would be safer
        {
            objectFile.Open();
            defaultDeserializer = DeserializerFactory(this);
            defaultDeserializer.OnLookupObjectByReference += LookupObjectByReference;
            if (objectFile.TryReadBinaryObject(TYPECACHE_GUID, out byte[] typeCacheBytes))
            {
                object otc = null;
                if (defaultDeserializer.DeserializeObject(typeCacheBytes, ref otc))
                {
                    // Fast path: the persisted type cache deserialized cleanly.
                    KeyValuePair<String, Guid>[] typeCacheValues = (KeyValuePair<String, Guid>[])otc;
                    objectTypeCache = new BTreeValueSet<Type, Guid>();
                    objectTypeCache.AddRange(typeCacheValues.Select((kvp) => new KeyValuePair<Type, Guid>(Type.GetType(kvp.Key), kvp.Value)));
                    // The cache object is removed from the file; Close() re-writes it.
                    objectFile.RemoveBinaryObjects(TYPECACHE_GUID);
                    objectFile.Flush();
                }
                else
                {
                    // Slow path: stale/corrupt cache — drop it and rebuild by scanning all objects.
                    objectFile.RemoveBinaryObjects(TYPECACHE_GUID);
                    objectFile.Flush();
                    objectTypeCache = new BTreeValueSet<Type, Guid>();
                    foreach (Guid uid in objectFile.ObjectUIDs)
                    {
                        if (objectFile.TryReadBinaryObject(uid, out byte[] serializedBytes))
                        {
                            if (defaultDeserializer.TryGetType(serializedBytes, out Type type))
                            {
                                objectTypeCache.TryAdd(type, uid);
                            }
                        }
                    }
                }
            } else
            {
                // Fresh file: start with an empty type cache.
                objectTypeCache = new BTreeValueSet<Type, Guid>();
            }
        }
    }

    /// <summary>
    /// Persists the type cache under TYPECACHE_GUID and closes the backing file.
    /// Safe to call repeatedly; subsequent calls are no-ops.
    /// </summary>
    public void Close()
    {
        lock (this)
        {
            if (objectFile != null)
            {
                KeyValuePair<String, Guid>[] typeCacheValues = objectTypeCache.GetKeyValuePairs().Select((kvp) => new KeyValuePair<string, Guid>(kvp.Key.GetSimpleQualifiedName(), kvp.Value)).ToArray();
                if (SerializerFactory(this).SerializeObject(typeCacheValues, out byte[] typeCacheBytes))
                {
                    objectFile.WriteBinaryObject(TYPECACHE_GUID, typeCacheBytes);
                    objectFile.Flush();
                }
                objectFile.Dispose();
                objectFile = null;
            }
        }
    }

    /// <summary>Loads the object stored under <paramref name="uid"/>, cast to T.</summary>
    public T LoadObject<T>(Guid uid) => (T)LoadObject(uid, typeof(T));

    /// <summary>
    /// Loads an object by UID, reusing the cached instance when available.
    /// The fresh instance is registered in the cache *before* deserialization so
    /// that cyclic references resolve back to the same object.
    /// </summary>
    public object LoadObject(Guid uid,Type type)
    {
        lock (this)
        {
            if (!objectCache.TryGetObject(uid,out object o))
            {
                // 'true' allows non-public constructors.
                o = Activator.CreateInstance(type, true);
                objectCache.Add(uid, o);
                byte[] boData = objectFile.ReadBinaryObject(uid);
                if (!defaultDeserializer.DeserializeObject(boData, ref o))
                    throw new Exception("unable to deserialize");
            }
            return o;
        }
    }

    /// <summary>Lazily loads every stored object of the given type.</summary>
    public IEnumerable<object> LoadObjects(Type type)
    {
        foreach (Guid uid in objectTypeCache[type])
        {
            yield return LoadObject(uid, type);
        }
    }

    /// <summary>Lazily loads every stored object of type T.</summary>
    public IEnumerable<T> LoadObjects<T>()
    {
        foreach (Guid uid in objectTypeCache[typeof(T)])
        {
            yield return LoadObject<T>(uid);
        }
    }

    // Deserializer callback: resolves a stored Guid reference to the live object.
    private bool LookupObjectByReference(object reference, Type targetType, out object o)
    {
        if (DEBUG)
            Logging.Log(LogLevel.DEBUG, "LookupObjectReference: {0} [ {1} ]", reference, targetType.Name);
        o = LoadObject((Guid)reference, targetType);
        return true;
    }

    // Runs an action with saveTransaction installed as the current transaction,
    // restoring the previous one afterwards (transactions may nest).
    private void TransactionalAction(SaveTransaction saveTransaction, Action action)
    {
        lock (this)
        {
            SaveTransaction oldTransaction = currentTransaction;
            currentTransaction = saveTransaction;
            try
            {
                action();
            }
            finally
            {
                currentTransaction = oldTransaction;
            }
        }
    }

    /// <summary>Saves an object under its (possibly newly assigned) UID.</summary>
    public Guid SaveObject(object o) => SaveObject(GetObjectUID(o), o);

    /// <summary>
    /// Serializes the object and everything it transitively references, then
    /// writes only the entries whose bytes actually changed. An unchanged entry
    /// is marked with a null payload in the transaction and skipped here.
    /// </summary>
    public Guid SaveObject(Guid uid, object o)
    {
        lock (this)
        {
            SaveTransaction saveTransaction = new SaveTransaction(this);
            TransactionalAction(saveTransaction, () => PrepareSave(saveTransaction, uid, o));
            foreach (KeyValuePair<Guid, byte[]> bo in saveTransaction.BinaryObjects)
                if (bo.Value != null)
                {
                    if (DEBUG)
                        Logging.Log(LogLevel.DEBUG, "Storing {0} [ {1} ]", bo.Key, objectCache[bo.Key].GetType());
                    objectFile.WriteBinaryObject(bo.Key, bo.Value);
                }
                else if (DEBUG)
                    Logging.Log(LogLevel.DEBUG, "Ignoring unchanged {0} [ {1} ]", bo.Key, objectCache[bo.Key].GetType());
            return uid;
        }
    }

    // Serializes one object into the transaction, records its type/index entries,
    // then recursively prepares every newly referenced object.
    private void PrepareSave(SaveTransaction saveTransaction,Guid uid, object o)
    {
        saveTransaction.PushReferencedObjects();
        if (!saveTransaction.Serializer.SerializeObject(o, out byte[] serializedBytes))
            throw new SerializationException();
        if (!Object.ReferenceEquals(null, o))
        {
            objectTypeCache.TryAdd(o.GetType(), uid);
            GetIndexLeaf(o.GetType()).Reindex(uid, o);
        }
        if (objectFile.TryReadBinaryObject(uid, out byte[] storedBytes) && storedBytes.AreEqual(serializedBytes))
        {
            // Unchanged: null payload tells SaveObject to skip the write, and the
            // references discovered while serializing this object are discarded.
            saveTransaction.BinaryObjects.Add(uid, null);
            saveTransaction.PopReferencedObjects();
        }
        else
        {
            saveTransaction.BinaryObjects.Add(uid, serializedBytes);
            saveTransaction.PopReferencedObjectsToFinal();
        }
        // Recurse into referenced objects not yet part of this transaction.
        foreach (object ro in saveTransaction.ReferencedObjects.ToArray())
        {
            Guid oUid = GetObjectUID(ro);
            if (!saveTransaction.BinaryObjects.ContainsKey(oUid))
                PrepareSave(saveTransaction, oUid, ro);
        }
    }

    /// <summary>
    /// Returns the UID of an object, assigning and caching a fresh one when the
    /// object has not been seen before.
    /// </summary>
    public Guid GetObjectUID(object o)
    {
        lock (this)
        {
            if (!objectCache.TryGetUID(o, out Guid uid))
            {
                uid = Guid.NewGuid();
                objectCache.Add(uid, o);
            }
            return uid;
        }
    }

    // Returns (creating on demand) the root index leaf for a type.
    private IndexLeaf GetIndexLeaf(Type type)
    {
        if (!indeces.TryGetValue(type,out IndexLeaf indexLeaf))
        {
            indexLeaf = new IndexLeaf(type, null, (o) => o);
            indeces.Add(type, indexLeaf);
        }
        return indexLeaf;
    }

    /// <summary>
    /// Ensures all given index definitions exist for a type, rebuilding the
    /// indexes once at the end when anything was added or changed.
    /// </summary>
    /// <returns>true when a rebuild took place.</returns>
    public bool EnsureIndeces(Type type,IEnumerable<KeyValuePair<string,index.Index>> indexDefinitions)
    {
        bool rebuild = false;
        foreach (KeyValuePair<string, index.Index> indexDefinition in indexDefinitions)
            rebuild |= EnsureIndex(type, indexDefinition.Key, indexDefinition.Value, false);
        if (rebuild)
            RebuildIndeces();
        return rebuild;
    }

    /// <summary>Loads every object of a type whose indexed value at <paramref name="path"/> matches the criterion.</summary>
    public IEnumerable<object> QueryObjects(Type type, string path, Func<object, bool> criterion) => QueryUids(type, path, criterion).Select((uid) => LoadObject(uid, type));

    /// <summary>Returns the UIDs of every object of a type matching the criterion at the indexed path.</summary>
    public IEnumerable<Guid> QueryUids(Type type,string path,Func<object,bool> criterion)
    {
        HashSet<Guid> result = new HashSet<Guid>();
        GetIndexLeaf(type).GetLeaf(path).Match(criterion, result);
        return result;
    }

    /// <summary>Ensures a single index exists, rebuilding immediately when it changed.</summary>
    public bool EnsureIndex(Type type, string path, index.Index index) => EnsureIndex(type, path, index, true);

    private bool EnsureIndex(Type type, string path, index.Index index, bool rebuild)
    {
        IndexLeaf indexLeaf = GetIndexLeaf(type).GetLeaf(path);
        // NOTE(review): this compares the new index's *type* to indexLeaf.Index —
        // presumably Index stores a Type there; confirm against IndexLeaf.
        if (index.GetType().Equals(indexLeaf.Index))
            return false;
        indexLeaf.Index = index;
        if (rebuild)
            RebuildIndeces();
        return true;
    }

    // Clears and re-populates every per-type index from the stored objects.
    private void RebuildIndeces()
    {
        foreach (Type type in objectTypeCache.Keys)
        {
            IndexLeaf indexLeaf = GetIndexLeaf(type);
            indexLeaf.Clear();
            foreach (object value in LoadObjects(type))
                indexLeaf.Reindex(GetObjectUID(value), value);
        }
    }

    public void Dispose()
    {
        Close();
    }

    // Collects the serialized form of one object graph during SaveObject.
    class SaveTransaction
    {
        public ObjectStore ObjectStore { get; }
        public Serializer Serializer { get; }

        // Objects referenced by entries that actually changed; all must be saved too.
        public HashSet<object> ReferencedObjects { get; } = new HashSet<object>();
        // Stack of per-object reference sets, pushed/popped around each PrepareSave.
        public Stack<HashSet<object>> referencedObjectsStack { get; } = new Stack<HashSet<object>>();
        HashSet<object> currentReferencedObjectsSet => referencedObjectsStack.Peek();

        // UID -> serialized bytes; a null value marks an unchanged object (no write needed).
        public Dictionary<Guid, byte[]> BinaryObjects { get; } = new Dictionary<Guid, byte[]>();

        public SaveTransaction(ObjectStore objectStore)
        {
            ObjectStore = objectStore;
            Serializer = ObjectStore.SerializerFactory(objectStore);
            Serializer.OnLookupReference += LookupReference;
        }

        // Serializer callback: replaces an object with its UID and remembers it as
        // referenced by the object currently being serialized.
        public bool LookupReference(object value, out object reference)
        {
            reference = ObjectStore.GetObjectUID(value);
            currentReferencedObjectsSet.Add(value);
            return true;
        }

        public void PushReferencedObjects() => referencedObjectsStack.Push(new HashSet<object>());
        public void PopReferencedObjects() => referencedObjectsStack.Pop();

        // Pops the current set and merges it into the final ReferencedObjects set.
        public void PopReferencedObjectsToFinal()
        {
            HashSet<object> ro = referencedObjectsStack.Pop();
            foreach (object o in ro)
                ReferencedObjects.Add(o);
        }
    }
}
}

43
catalog/ODBBool.cs 100644
View File

@ -0,0 +1,43 @@
// /**
// * File: ODBBool.cs
// * Author: haraldwolff
// *
// * This file and its content is copyrighted by the Author and / or copyright holder.
// * Any use without proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
namespace ln.objects.catalog
{
/// <summary>
/// Boolean ODB value. Only the two shared singletons exist; the private
/// constructor prevents further instances.
/// </summary>
public class ODBBool : ODBValue
{
    /// <summary>Shared instance representing true.</summary>
    public static ODBBool True = new ODBBool(true);
    /// <summary>Shared instance representing false.</summary>
    public static ODBBool False = new ODBBool(false);

    bool isTrue;

    private ODBBool(bool b)
        : base(0x04, b)
    {
        isTrue = b;
    }

    public override byte[] Serialize()
    {
        // Single payload byte: 0xFF for true, 0x00 for false.
        return new byte[] { isTrue ? (byte)0xFF : (byte)0x00 };
    }

    protected override int compare(ODBEntity other)
    {
        // Same singleton means equal; otherwise true sorts after false.
        if (ReferenceEquals(this, other))
            return 0;
        return isTrue ? 1 : -1;
    }

    static ODBBool()
    {
        // Any non-zero payload byte deserializes to True.
        RegisterDeserializer(0x04, (b, o, l) => b[o] != 0 ? True : False);
    }
}
}

View File

@ -0,0 +1,52 @@
// /**
// * File: ODBByteBuffer.cs
// * Author: haraldwolff
// *
// * This file and its content is copyrighted by the Author and / or copyright holder.
// * Any use without proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using ln.type;
using System;
namespace ln.objects.catalog
{
/// <summary>
/// Immutable byte-buffer ODB value. The constructor copies the caller's
/// array so later mutations cannot leak into the stored value.
/// </summary>
public class ODBByteBuffer : ODBValue
{
    public byte[] GetBytes() => (byte[])Value;

    public ODBByteBuffer(byte[] bytes)
        : base(0x0800, bytes.Slice(0))   // defensive copy
    {
    }

    public override byte[] Serialize() => GetBytes();

    protected override int compare(ODBEntity other)
    {
        ODBByteBuffer that = other as ODBByteBuffer;
        byte[] mine = GetBytes();
        byte[] theirs = that.GetBytes();

        // Shorter buffers order first.
        int lengthDelta = mine.Length - theirs.Length;
        if (lengthDelta != 0)
            return lengthDelta;

        // Equal length: byte-wise lexicographic comparison.
        for (int i = 0; i < mine.Length; i++)
        {
            int delta = mine[i] - theirs[i];
            if (delta != 0)
                return delta;
        }
        return 0;
    }

    static ODBByteBuffer()
    {
        RegisterDeserializer(0x0800, (storageBytes, offset, length) => new ODBByteBuffer(storageBytes.Slice(offset, length)));
    }
}
}

View File

@ -0,0 +1,28 @@
using System;
namespace ln.objects.catalog
{
/// <summary>Double-precision floating point ODB value.</summary>
public class ODBDouble : ODBValue
{
    public ODBDouble(double value)
        :base(0x18,value)
    {}

    public override byte[] Serialize() => BitConverter.GetBytes((double)Value);

    protected override int compare(ODBEntity other)
    {
        double mine = (double)Value;
        double theirs = (double)(other as ODBValue).Value;
        // Values closer together than double.Epsilon count as equal.
        if (Math.Abs(mine - theirs) < double.Epsilon)
            return 0;
        return mine < theirs ? -1 : 1;
    }

    static ODBDouble()
    {
        RegisterDeserializer(0x0018, (b, o, l) => new ODBDouble(BitConverter.ToDouble(b, o)));
    }
}
}

View File

@ -0,0 +1,208 @@
using ln.type;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;
/**
* typeCode list
*
* 0x0000 ODBNull
* 0x0001 ODBStringValue
* 0x0002 ODBList
* 0x0003 ODBGuid
* 0x0004 ODBBool
* 0x0005 ODBObject
*
* 0x0010 ODBInteger
* 0x0011 ODBUInteger
* 0x0012 ODBLong
* 0x0013 ODBULong
*
* 0x0018 ODBDouble
*
* 0x0020 ODBTypedMapping
*
* 0x0800 ODBByteBuffer
*
* 0x1000 ODBDocument
* 0x1001 Document (ln.types.odb.ng)
*
*
*
**/
namespace ln.objects.catalog
{
public delegate ODBEntity ODBValueFactory(object value);
public delegate ODBEntity ODBDeserialize(byte[] storageBytes, int offset, int length);
/// <summary>
/// ODBEntity. The base of all ODB types.
/// </summary>
/// <remarks>The data model used by ODB can be described as follows:
///
/// Each piece of information to be used with ODB may be called an entity.
/// ODB provides support for several types of entities (e.g. numbers, strings, documents, lists,...)
/// An entity has an identity and a state.
/// Two entities of same type having the same identity are considered to be "equal".
/// Two entities of same type having the same identity may represent different states of the same entity (different "versions", e.g. an old and an up-to-date state).
///
/// <c>ODBEntity</c> instances
/// - define identity through <c>Identity</c>
/// - may be seen as a "container" holding a certain state
///
/// <c>ODBValue</c> instances
/// extend <c>ODBEntity</c>
/// - implement <c>Identity</c> to return itself
/// - implement <c>CompareTo</c> as simple "subtraction"
///
/// other <c>ODBEntity</c> subtypes
/// - implement a read-only <c>Identity</c> to return a <c>ODBvalue</c> instance
/// - implement <c>CompareTo</c> type specific without defined semantic meaning
///
///
/// </remarks>
///
public abstract class ODBEntity : IComparable<ODBEntity>
{
    // Type code written to storage ahead of the payload; drives deserializer dispatch.
    int storageTypeCode;

    /// <summary>
    /// Gets the identity of this Entity.
    /// </summary>
    /// <value>The identity.</value>
    /// NOTE(review): subclasses that do not override this return null, which would
    /// make GetHashCode() throw — confirm every concrete type supplies an Identity.
    public virtual ODBValue Identity { get; }

    /// <summary>
    /// Independently clone this instance.
    /// </summary>
    /// <remarks>
    /// For immutable values this returns the instance itself.
    /// Complex ODBEntities will return a copy of themselves that is completely independent of the source.
    /// </remarks>
    /// <returns>The clone.</returns>
    public abstract ODBEntity Clone();

    /// <summary>
    /// Implements the internal comparison within the same subclass of ODBEntity.
    /// Only invoked when both operands share the same storage type code.
    /// </summary>
    /// <returns>Negative, zero or positive ordering result.</returns>
    /// <param name="other">Entity to compare against; never null here.</param>
    protected abstract int compare(ODBEntity other);

    protected ODBEntity(int storageTypeCode)
    {
        this.storageTypeCode = storageTypeCode;
    }

    /// <summary>
    /// Orders entities first by storage type code, then by the type-specific comparison.
    /// </summary>
    public int CompareTo(ODBEntity other)
    {
        // Per the IComparable<T> contract any instance compares greater than null
        // (the previous implementation dereferenced 'other' and threw NullReferenceException).
        if (other is null)
            return 1;
        if (storageTypeCode != other.storageTypeCode)
            return storageTypeCode - other.storageTypeCode;
        return compare(other);
    }

    /// <summary>Serializes only the payload (without type code / length header).</summary>
    public abstract byte[] Serialize();

    /// <summary>
    /// Writes the full storage record: type code, payload length, payload bytes.
    /// </summary>
    public virtual void Serialize(BinaryWriter storage)
    {
        byte[] storageBytes = Serialize();
        storage.Write(storageTypeCode);
        storage.Write(storageBytes.Length);
        storage.Write(storageBytes, 0, storageBytes.Length);
    }

    public override int GetHashCode() => Identity.GetHashCode();

    public override bool Equals(object obj)
    {
        // Null-safe: Equals(null) must return false instead of throwing
        // (the previous implementation called obj.GetType() unconditionally).
        return obj is ODBEntity entity
            && Equals(GetType(), obj.GetType())
            && Equals(Identity, entity.Identity);
    }

    public static bool operator <(ODBEntity a, ODBEntity b) => a.CompareTo(b) < 0;
    public static bool operator >(ODBEntity a, ODBEntity b) => a.CompareTo(b) > 0;
    public static bool operator <=(ODBEntity a, ODBEntity b) => a.CompareTo(b) <= 0;
    public static bool operator >=(ODBEntity a, ODBEntity b) => a.CompareTo(b) >= 0;
    // Null-safe on both sides: two nulls are equal, null never equals a non-null entity.
    // (Previously "entity == null" with a non-null left operand threw NullReferenceException.)
    public static bool operator ==(ODBEntity a, ODBEntity b) => a is null ? b is null : !(b is null) && a.CompareTo(b) == 0;
    public static bool operator !=(ODBEntity a, ODBEntity b) => !(a == b);

    //public static implicit operator ODBEntity(ValueType v)
    //{
    //    return Mapper.Default.MapValue(v);
    //}
    //public static implicit operator ODBEntity(String v)
    //{
    //    return Mapper.Default.MapValue(v);
    //}
    //public static ODBEntity FromNative(object v)
    //{
    //    return Mapper.Default.MapValue(v);
    //}

    // Registry of payload deserializers keyed by storage type code.
    static Dictionary<int, ODBDeserialize> valueDeserializers = new Dictionary<int, ODBDeserialize>();

    public static void RegisterDeserializer(int storageTypeCode, ODBDeserialize deserialize)
    {
        valueDeserializers.Add(storageTypeCode, deserialize);
    }

    /// <summary>
    /// Deserializes one entity from a buffer, advancing <paramref name="offset"/> past the record.
    /// </summary>
    /// <exception cref="KeyNotFoundException">No deserializer is registered for the type code.</exception>
    public static ODBEntity Deserialize(byte[] buffer, ref int offset)
    {
        int storageTypeCode = BitConverter.ToInt32(buffer, offset);
        int storageLength = BitConverter.ToInt32(buffer, offset + 4);
        if (!valueDeserializers.ContainsKey(storageTypeCode))
            throw new KeyNotFoundException(string.Format("StorageTypeCode 0x{0:x8} at offset 0x{1:x8}", storageTypeCode, offset));
        ODBEntity value = valueDeserializers[storageTypeCode](buffer, offset + 8, storageLength);
        offset += 8 + storageLength;
        return value;
    }

    /// <summary>
    /// Deserializes one entity from a stream (type code, length, payload).
    /// </summary>
    /// <exception cref="FormatException">No deserializer is registered for the type code.</exception>
    public static ODBEntity Deserialize(Stream stream)
    {
        int storageTypeCode = stream.ReadInteger();
        int storageLength = stream.ReadInteger();
        byte[] b = new byte[storageLength];
        stream.Read(b, 0, storageLength);
        if (valueDeserializers.ContainsKey(storageTypeCode))
            return valueDeserializers[storageTypeCode](b, 0, storageLength);
        else
            throw new FormatException("wrong storage type code");
    }

    public override string ToString()
    {
        return string.Format("[{0} Identity={1}]", GetType().Name, Identity);
    }

    public string TreeString => ToTreeString(0);
    public abstract string ToTreeString(int indent);

    static ODBEntity()
    {
        // Force the static constructors of all built-in entity types so that their
        // deserializers are registered before the first Deserialize call.
        RuntimeHelpers.RunClassConstructor(typeof(ODBNull).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBObject).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBList).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBStringValue).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBInteger).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBUInteger).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBLong).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBULong).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBDouble).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBGuid).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBBool).TypeHandle);
        RuntimeHelpers.RunClassConstructor(typeof(ODBByteBuffer).TypeHandle);
    }
}
}

24
catalog/ODBGuid.cs 100644
View File

@ -0,0 +1,24 @@
using ln.type;
using System;
using System.Linq;
namespace ln.objects.catalog
{
/// <summary>Guid ODB value; the parameterless constructor wraps a fresh Guid.</summary>
public class ODBGuid : ODBValue
{
    public ODBGuid()
        : this(Guid.NewGuid())
    { }

    public ODBGuid(Guid guid)
        : base(0x03, guid)
    { }

    public override byte[] Serialize() => ((Guid)Value).ToByteArray();

    protected override int compare(ODBEntity other)
    {
        Guid mine = (Guid)Value;
        return mine.CompareTo((other as ODBGuid).Value);
    }

    static ODBGuid()
    {
        // Payload is the Guid's 16-byte representation.
        RegisterDeserializer(0x03, (b, o, l) => new ODBGuid(new Guid(b.Slice(o, 16))));
    }
}
}

View File

@ -0,0 +1,42 @@
using System;
using System.Runtime.CompilerServices;
namespace ln.objects.catalog
{
/// <summary>Signed 32-bit integer ODB value.</summary>
public class ODBInteger : ODBValue
{
    public ODBInteger(int i)
        : base(0x10, i)
    { }

    public override byte[] Serialize() => BitConverter.GetBytes((int)Value);

    protected override int compare(ODBEntity other)
    {
        // Widen to long before subtracting: the previous raw int subtraction
        // overflows for operands of opposite sign near the extremes
        // (e.g. int.MinValue vs. a positive value) and reports the wrong order.
        long d = (long)(int)Value - (long)(int)(other as ODBValue).Value;
        return d == 0 ? 0 : (d < 0 ? -1 : 1);
    }

    static ODBInteger()
    {
        RegisterDeserializer(0x10, (b, o, l) => new ODBInteger(BitConverter.ToInt32(b, o)));
    }
}
/// <summary>Unsigned 32-bit integer ODB value.</summary>
public class ODBUInteger : ODBValue
{
    public ODBUInteger(uint i)
        : base(0x11, i)
    { }

    public override byte[] Serialize() => BitConverter.GetBytes((uint)Value);

    protected override int compare(ODBEntity other)
    {
        // Widened to long so the difference cannot overflow.
        long delta = Convert.ToInt64((uint)Value) - Convert.ToInt64((uint)(other as ODBValue).Value);
        if (delta < 0)
            return -1;
        return delta == 0 ? 0 : 1;
    }

    static ODBUInteger()
    {
        RegisterDeserializer(0x11, (b, o, l) => new ODBUInteger(BitConverter.ToUInt32(b, o)));
    }
}
}

132
catalog/ODBList.cs 100644
View File

@ -0,0 +1,132 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Collections;
using System.Text;
using ln.type;
namespace ln.objects.catalog
{
/// <summary>
/// Ordered list of ODB entities. The list has its own Guid identity, which is
/// preserved across serialization and cloning and is independent of its items.
/// </summary>
public class ODBList : ODBEntity, IEnumerable<ODBEntity>
{
    Guid identity = Guid.NewGuid();
    List<ODBEntity> items = new List<ODBEntity>();

    public override ODBValue Identity => new ODBGuid(identity);

    public ODBList()
        : base(0x02)
    {
    }

    /// <summary>
    /// Deserializes a list from storage form: 16 identity bytes, item count,
    /// then each item's serialized record in order.
    /// </summary>
    public ODBList(byte[] bytes, int offset, int length)
        : this()
    {
        MemoryStream stream = new MemoryStream(bytes, offset, length);
        identity = new Guid(stream.ReadBytes(16));
        int itemCount = stream.ReadInteger();
        for (int n = 0; n < itemCount; n++)
            items.Add(Deserialize(stream));
    }

    public ODBEntity this[int i]
    {
        get => items[i];
        set => items[i] = value;
    }

    public int Count => items.Count;

    public void Add(ODBEntity value) => items.Add(value);

    public void AddRange(IEnumerable<ODBEntity> values)
    {
        foreach (ODBEntity value in values)
            Add(value);
    }

    public void Remove(ODBEntity value) => items.Remove(value);

    public void RemoveAt(int i) => items.RemoveAt(i);

    /// <summary>Deep copy: same identity, independently cloned items.</summary>
    public override ODBEntity Clone()
    {
        ODBList copy = new ODBList();
        copy.identity = identity;
        foreach (ODBEntity item in items)
            copy.items.Add(item.Clone());
        return copy;
    }

    public override byte[] Serialize()
    {
        MemoryStream buffer = new MemoryStream();
        BinaryWriter writer = new BinaryWriter(buffer);
        writer.Write(identity.ToByteArray());
        writer.Write(items.Count);
        foreach (ODBEntity item in items)
            item.Serialize(writer);
        return buffer.ToArray();
    }

    protected override int compare(ODBEntity other)
    {
        ODBList that = other as ODBList;
        // Shorter lists order first; equal-length lists compare element-wise.
        int delta = Count - that.Count;
        if (delta != 0)
            return delta;
        for (int n = 0; n < Count; n++)
        {
            delta = this[n].CompareTo(that[n]);
            if (delta != 0)
                return delta;
        }
        return 0;
    }

    public IEnumerator<ODBEntity> GetEnumerator() => items.GetEnumerator();

    IEnumerator IEnumerable.GetEnumerator() => items.GetEnumerator();

    static ODBList()
    {
        RegisterDeserializer(0x02, (b, o, l) => new ODBList(b, o, l));
    }

    public override string ToTreeString(int indent)
    {
        indent += 2;
        StringBuilder builder = new StringBuilder();
        builder.AppendFormat("{0} Identity={1} Count={2}", GetType().Name, Identity, Count);
        foreach (ODBEntity entity in items)
        {
            builder.AppendLine();
            builder.AppendFormat("{0}{1}", new string(' ', indent), entity.ToTreeString(indent));
        }
        return builder.ToString();
    }
}
}

63
catalog/ODBLong.cs 100644
View File

@ -0,0 +1,63 @@
using System;
using System.Runtime.CompilerServices;
namespace ln.objects.catalog
{
/// <summary>
/// Signed 64-bit integer ODB value; also carries persisted timestamps and
/// durations in milliseconds (see the implicit conversions).
/// </summary>
public class ODBLong : ODBValue
{
    public ODBLong(long value)
        : base(0x12, value)
    { }

    public override byte[] Serialize() => BitConverter.GetBytes((long)Value);

    protected override int compare(ODBEntity other)
    {
        long a = (long)Value;
        long b = (long)(other as ODBValue).Value;
        // Compare directly instead of via "a - b": the previous subtraction
        // overflows for operands of opposite sign near the extremes and then
        // reports the wrong order.
        if (a == b)
            return 0;
        return a < b ? -1 : 1;
    }

    // Interprets the value as milliseconds since the Unix epoch / a millisecond duration.
    public static implicit operator DateTime(ODBLong l) => DateTimeOffset.FromUnixTimeMilliseconds((long)l.Value).DateTime;
    public static implicit operator TimeSpan(ODBLong l) => TimeSpan.FromMilliseconds((long)l.Value);

    static ODBLong()
    {
        RegisterDeserializer(0x12, (b, o, l) => new ODBLong(BitConverter.ToInt64(b, o)));
    }
}
/// <summary>Unsigned 64-bit integer ODB value.</summary>
public class ODBULong : ODBValue
{
    public ODBULong(ulong value)
        : base(0x13, value)
    { }

    public override byte[] Serialize() => BitConverter.GetBytes((ulong)Value);

    protected override int compare(ODBEntity other)
    {
        ulong mine = (ulong)Value;
        ulong theirs = (ulong)(other as ODBValue).Value;
        if (mine < theirs)
            return -1;
        return mine == theirs ? 0 : 1;
    }

    static ODBULong()
    {
        RegisterDeserializer(0x13, (b, o, l) => new ODBULong(BitConverter.ToUInt64(b, o)));
    }
}
}

24
catalog/ODBNull.cs 100644
View File

@ -0,0 +1,24 @@
using System;
namespace ln.objects.catalog
{
/// <summary>
/// The ODB null value — a singleton; all nulls compare equal and equality is
/// reference identity.
/// </summary>
public class ODBNull : ODBValue
{
    /// <summary>The one and only null instance.</summary>
    public static readonly ODBNull Instance = new ODBNull();

    private ODBNull()
        : base(0x00, null)
    { }

    // Null has no payload.
    public override byte[] Serialize()
    {
        return new byte[0];
    }

    // Every null equals every other null.
    protected override int compare(ODBEntity other) => 0;

    public override int GetHashCode() => 0;

    public override bool Equals(object obj) => ReferenceEquals(this, obj);

    static ODBNull()
    {
        RegisterDeserializer(0x00, (b, o, l) => Instance);
    }
}
}

View File

@ -0,0 +1,152 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
namespace ln.objects.catalog
{
/// <summary>
/// Mutable key/value ODB entity: a dictionary mapping ODB keys to ODB values.
/// Absent properties read as <c>ODBNull.Instance</c>; assigning an ODBNull
/// removes the property.
/// </summary>
public class ODBObject : ODBEntity
{
    private Dictionary<ODBEntity, ODBEntity> properties = new Dictionary<ODBEntity, ODBEntity>();

    public ODBObject()
        :base(0x0005)
    { }

    public ODBObject(byte[] bytes)
        : this(bytes, 0, bytes.Length)
    {}

    /// <summary>
    /// Deserializes from storage form: property count followed by alternating
    /// serialized key/value records.
    /// </summary>
    /// <exception cref="FormatException">Deserialization consumed more bytes than the record contains.</exception>
    public ODBObject(byte[] bytes,int offset,int length)
        :this()
    {
        int endOffset = offset + length;
        int nProps = BitConverter.ToInt32(bytes, offset);
        offset += 4;
        for (int n=0;n<nProps;n++)
        {
            ODBEntity propName = ODBEntity.Deserialize(bytes,ref offset);
            ODBEntity propValue = ODBEntity.Deserialize(bytes, ref offset);
            properties.Add(propName, propValue);
        }
        if (offset > endOffset)
            throw new FormatException("object deserialization read behind end of buffer");
    }

    public ODBEntity this[ODBEntity propName]
    {
        get
        {
            // Single dictionary lookup instead of ContainsKey + indexer.
            return properties.TryGetValue(propName, out ODBEntity value) ? value : ODBNull.Instance;
        }
        set
        {
            if (ODBNull.Instance.Equals(value))
            {
                // Storing "null" removes the property; Remove is a no-op for absent keys,
                // so no ContainsKey pre-check is needed.
                properties.Remove(propName);
            }
            else
            {
                properties[propName] = value;
            }
        }
    }

    public ODBEntity this[string propName]
    {
        get => this[new ODBStringValue(propName)];
        set => this[new ODBStringValue(propName)] = value;
    }

    public IEnumerable<ODBEntity> Keys => properties.Keys;

    public bool Contains(string propName) => Contains(new ODBStringValue(propName));
    public bool Contains(ODBEntity propName)
    {
        return !ODBNull.Instance.Equals(this[propName]);
    }

    /// <summary>Deep copy: every property value is cloned.</summary>
    public override ODBEntity Clone()
    {
        ODBObject clone = new ODBObject();
        foreach (ODBEntity fieldName in properties.Keys)
        {
            clone[fieldName] = this[fieldName].Clone();
        }
        return clone;
    }

    /// <summary>Replaces the target's properties with clones of this object's properties.</summary>
    public void CloneTo(ODBObject target)
    {
        target.properties.Clear();
        foreach (ODBEntity fieldName in properties.Keys)
        {
            target[fieldName] = this[fieldName].Clone();
        }
    }

    public override byte[] Serialize()
    {
        MemoryStream stream = new MemoryStream();
        BinaryWriter writer = new BinaryWriter(stream);
        writer.Write(properties.Count);
        foreach (ODBEntity propName in properties.Keys)
        {
            ODBEntity propValue = properties[propName];
            propName.Serialize(writer);
            propValue.Serialize(writer);
        }
        return stream.ToArray();
    }

    public override string ToString()
    {
        return String.Format("[Object {0}]", String.Join(" ",properties.Select(kv=> String.Format("{0}={1}",kv.Key,kv.Value))));
    }

    public override string ToTreeString(int indent)
    {
        indent += 2;
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.AppendFormat("{0} Identity={1} Count={2}", GetType().Name, Identity, properties.Count);
        foreach (ODBValue key in properties.Keys)
        {
            stringBuilder.AppendLine();
            stringBuilder.AppendFormat("{0}{1,-32}: {2}", new String(' ', indent), key, properties[key].ToTreeString(indent));
        }
        return stringBuilder.ToString();
    }

    // Compares property-wise over the union of both key sets; a key missing on
    // either side reads as ODBNull there.
    protected override int compare(ODBEntity e)
    {
        ODBObject other = e as ODBObject;
        ODBEntity[] keys = Keys.Union(other.Keys).ToArray();
        foreach (ODBEntity key in keys)
        {
            ODBEntity mine = this[key];
            ODBEntity yours = other[key];
            int c = mine.CompareTo(yours);
            if (c != 0)
                return c;
        }
        return 0;
    }

    static ODBObject()
    {
        RegisterDeserializer(0x0005, (b,o,l) => new ODBObject(b,o,l));
    }
}
}

View File

@ -0,0 +1,22 @@
using System;
using System.Text;
using System.Globalization;
namespace ln.objects.catalog
{
/// <summary>String ODB value, stored as UTF-8 bytes.</summary>
public class ODBStringValue : ODBValue
{
    public ODBStringValue(string s)
        : base(0x01, s)
    { }

    public override byte[] Serialize()
    {
        return Encoding.UTF8.GetBytes((string)Value);
    }

    protected override int compare(ODBEntity other)
    {
        // NOTE(review): string.CompareTo is culture-sensitive, so the ordering can
        // differ between machines with different current cultures — confirm whether
        // ordinal ordering was intended before changing persisted index semantics.
        string mine = (string)Value;
        return mine.CompareTo((other as ODBValue).Value);
    }

    static ODBStringValue()
    {
        RegisterDeserializer(0x01, (b, o, l) => new ODBStringValue(Encoding.UTF8.GetString(b, o, l)));
    }
}
}

View File

@ -0,0 +1,84 @@
// /**
// * File: ODBTypedValue.cs
// * Author: haraldwolff
// *
// * This file and its content is copyrighted by the Author and / or copyright holder.
// * Any use without proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.IO;
namespace ln.objects.catalog
{
//public class ODBTypedValue : ODBEntity
//{
// public ODBEntity ODBValue { get; private set; }
// public Type TargetType { get; private set; }
// public override uint AsUInt => ODBValue.AsUInt;
// internal ODBTypedValue()
// : base(0x0020)
// {
// }
// public ODBTypedValue(Type targetType,ODBEntity value)
// : this()
// {
// TargetType = targetType;
// ODBValue = value;
// }
// public ODBTypedValue(byte[] bytes,int offset,int length)
// : this()
// {
// String aname = ODBEntity.Deserialize(bytes, ref offset).AsString;
// String tname = ODBEntity.Deserialize(bytes, ref offset).AsString;
// ODBValue = ODBEntity.Deserialize(bytes, ref offset);
// TargetType = Type.GetType(String.Format("{0}, {1}", tname, aname));
// }
// public override int CompareLevel => 126;
// public override int CompareInType(ODBEntity other)
// {
// ODBTypedValue typedValue = other as ODBTypedValue;
// return ODBValue.CompareTo(typedValue.ODBValue);
// }
// public override byte[] GetStorageBytes()
// {
// MemoryStream stream = new MemoryStream();
// BinaryWriter writer = new BinaryWriter(stream);
// ODBStringValue aname = TargetType.Assembly.GetName().Name;
// ODBStringValue tname = TargetType.FullName;
// aname.Write(writer);
// tname.Write(writer);
// ODBValue.Write(writer);
// return stream.ToArray();
// }
// public override int GetHashCode()
// {
// return ODBValue.GetHashCode();
// }
// public override bool Equals(object obj)
// {
// if (obj is ODBTypedValue)
// {
// ODBTypedValue typedValue = obj as ODBTypedValue;
// return ODBValue.Equals(typedValue.ODBValue);
// }
// return false;
// }
// static ODBTypedValue()
// {
// RegisterDeserializer(0x0020, (b, o, l) => new ODBTypedValue(b,o,l));
// }
//}
}

View File

@ -0,0 +1,75 @@
// /**
// * File: ODBValue.cs
// * Author: haraldwolff
// *
// * This file and its content is copyrighted by the Author and / or copyright holder.
// * Any use without proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.Text;
namespace ln.objects.catalog
{
/// <summary>
/// The base type of all immutable ODB types
/// </summary>
/// <summary>
/// The base type of all immutable ODB types: wraps a native CLR value and acts
/// as its own identity.
/// </summary>
public abstract class ODBValue : ODBEntity
{
    /// <summary>The wrapped native value (null only for ODBNull).</summary>
    public object Value { get; protected set; }

    /// <summary>Immutable values are their own identity.</summary>
    public override ODBValue Identity => this;

    /// <summary>Immutable values need no copying; Clone returns this instance.</summary>
    public override ODBEntity Clone() => this;

    protected ODBValue(int storageTypeCode, object value)
        : base(storageTypeCode)
    {
        Value = value;
    }

    public override int GetHashCode() => Value.GetHashCode();

    public override bool Equals(object obj)
    {
        // Null-safe: the previous implementation called obj.GetType() unconditionally
        // and threw NullReferenceException for Equals(null); the Equals contract
        // requires returning false instead.
        if (obj is null || !GetType().Equals(obj.GetType()))
            return false;
        if (obj is ODBValue value)
            return Equals(Value, value.Value);
        if (obj is ODBEntity entity)
            return Equals(Identity, entity.Identity);
        return false;
    }

    public override string ToString()
    {
        return string.Format("[{0} Value={1}]", GetType().Name, Value);
    }

    public override string ToTreeString(int indent)
    {
        return string.Format("{0} Value={1}", GetType().Name, Value);
    }

    //public virtual string AsString => As<string>();
    //public virtual bool AsBool => As<bool>();
    //public virtual byte AsByte => As<byte>();
    //public virtual char AsChar => (char)Value;
    //public virtual short AsShort => Convert.ToInt16(Value);
    //public virtual int AsInt => Convert.ToInt32(Value);
    //public virtual long AsLong => Convert.ToInt64(Value);
    //public virtual ushort AsUShort => Convert.ToUInt16(Value);
    //public virtual uint AsUInt => (uint)Value;
    //public virtual ulong AsULong => (ulong)Value;
    //public virtual double AsDouble => (double)Value;
    //public virtual float AsFloat => (float)Value;
    //public virtual Guid AsGuid => (Guid)Value;
    //public virtual DateTime AsDateTime => (DateTime)Mapper.Default.UnmapValue(typeof(DateTime), this);
    //public virtual TimeSpan AsTimeSpan => (TimeSpan)Mapper.Default.UnmapValue(typeof(TimeSpan), this);
}
}

View File

@ -0,0 +1,105 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace ln.objects.collections
{
/// <summary>
/// A list whose elements are loaded lazily from an <see cref="ObjectStore"/>:
/// each slot holds either a resolved instance, a UID to load on first access,
/// or both. Unloaded slots are resolved via ObjectStore.LoadObject on read.
/// </summary>
public class LazyList<T> : IList<T> where T : class
{
    ObjectStore objectStore;
    List<ListItem> listItems = new List<ListItem>();

    public LazyList(ObjectStore objectStore)
        : this(objectStore, new Guid[0]) { }

    /// <param name="objectStore">Store used to resolve UIDs to instances.</param>
    /// <param name="initialItems">UIDs to seed the list with; loaded on first access.</param>
    public LazyList(ObjectStore objectStore, Guid[] initialItems)
    {
        this.objectStore = objectStore;
        // BUGFIX: initialItems was previously ignored, so a persisted list
        // could never be rehydrated through this constructor.
        if (initialItems != null)
            foreach (Guid uid in initialItems)
                listItems.Add(new ListItem() { UID = uid });
    }

    public T this[int index]
    {
        get
        {
            ListItem listItem = listItems[index];
            if (Object.ReferenceEquals(null, listItem.Value))
            {
                // An empty UID with no loaded value represents a null slot.
                if (Guid.Empty.Equals(listItem.UID))
                    return null;
                listItem.Value = objectStore.LoadObject<T>(listItem.UID);
            }
            return listItem.Value;
        }
        set
        {
            // Replacing a slot invalidates its stored UID.
            ListItem listItem = listItems[index];
            listItem.Value = value;
            listItem.UID = Guid.Empty;
        }
    }

    public int Count => listItems.Count;
    public bool IsReadOnly => false;

    public void Add(T item) => listItems.Add(new ListItem() { Value = item });
    public void Clear() => listItems.Clear();

    public bool Contains(T item)
    {
        // Match by stored UID or by loaded reference, whichever is available.
        Guid uid = objectStore.GetObjectUID(item);
        foreach (ListItem listItem in listItems)
            if (listItem.UID.Equals(uid) || Object.ReferenceEquals(item, listItem.Value))
                return true;
        return false;
    }

    public void CopyTo(T[] array, int arrayIndex)
    {
        // Goes through the indexer so unloaded items are resolved.
        for (int i = 0; i < Count; i++)
            array[i + arrayIndex] = this[i];
    }

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();

    public IEnumerator<T> GetEnumerator()
    {
        for (int i = 0; i < Count; i++)
            yield return this[i];
    }

    public int IndexOf(T item)
    {
        Guid uid = objectStore.GetObjectUID(item);
        for (int i = 0; i < Count; i++)
        {
            ListItem listItem = listItems[i];
            if (listItem.UID.Equals(uid) || Object.ReferenceEquals(item, listItem.Value))
                return i;
        }
        return -1;
    }

    public void Insert(int index, T item) => listItems.Insert(index, new ListItem() { Value = item });

    public bool Remove(T item)
    {
        Guid uid = objectStore.GetObjectUID(item);
        for (int i = 0; i < Count; i++)
        {
            ListItem listItem = listItems[i];
            if (listItem.UID.Equals(uid) || Object.ReferenceEquals(item, listItem.Value))
            {
                listItems.RemoveAt(i);
                return true;
            }
        }
        return false;
    }

    public void RemoveAt(int index) => listItems.RemoveAt(index);

    class ListItem
    {
        public Guid UID;   // Identifier of reference in objectStore (Guid.Empty when unset)
        public T Value;    // Loaded reference (null until resolved)
    }
}
}

25
index/Index.cs 100644
View File

@ -0,0 +1,25 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ln.objects.index
{
/// <summary>
/// Abstract base for a single-value index over objects identified by Guid.
/// Implementations maintain a mapping from indexed values to object UIDs
/// and support predicate-based matching plus optional (de)serialization.
/// </summary>
public abstract class Index
{
    public Index()
    {
    }

    /// <summary>Update the index entry for <paramref name="uid"/> with a new value.</summary>
    public abstract void Reindex(Guid uid, object value);

    /// <summary>Remove all index entries for <paramref name="uid"/>.</summary>
    public abstract void Remove(Guid uid);

    /// <summary>Add to <paramref name="matches"/> the UIDs of all entries whose indexed value satisfies <paramref name="criterion"/>.</summary>
    public abstract void Match(Func<object, bool> criterion, ISet<Guid> matches);

    /// <summary>Reset this Index as if it was newly created.</summary>
    public abstract void Clear();

    /// <summary>Serialize the current state of the index to <paramref name="serializedIndex"/>; returns false if unsupported.</summary>
    public abstract bool TrySerializeIndex(out byte[] serializedIndex);

    /// <summary>Restore index state from <paramref name="serializedIndex"/>; returns false if unsupported.</summary>
    public abstract bool TryDeserializeIndex(byte[] serializedIndex);
}
}

55
index/IndexLeaf.cs 100644
View File

@ -0,0 +1,55 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ln.objects.index
{
/// <summary>
/// A node in a tree of indexes. Each leaf may carry its own <see cref="Index"/>
/// and a set of named child leafs; child values are derived from the parent
/// value through a getter function supplied at construction time.
/// </summary>
public class IndexLeaf
{
    public Type LeafType { get; set; }
    public string LeafName { get; set; }
    public Index Index { get; set; }

    Dictionary<string, IndexLeaf> children = new Dictionary<string, IndexLeaf>();
    Func<object, object> extractValue;

    public IndexLeaf()
    {
    }

    public IndexLeaf(Type valueType, string leafName, Func<object, object> getter)
    {
        LeafType = valueType;
        LeafName = leafName;
        extractValue = getter;
    }

    /// <summary>Look up a direct child leaf by name (throws when absent).</summary>
    public IndexLeaf GetLeaf(string path) => children[path];

    /// <summary>Register a child leaf under its <see cref="LeafName"/>.</summary>
    public void AddLeaf(IndexLeaf indexLeaf) => children.Add(indexLeaf.LeafName, indexLeaf);

    /// <summary>Remove the child leaf registered under <paramref name="leafName"/>.</summary>
    public void RemoveLeaf(string leafName) => children.Remove(leafName);

    /// <summary>
    /// Reindex this leaf's own index (if any) and recursively reindex every
    /// child with the value extracted by the child's getter.
    /// </summary>
    public void Reindex(Guid uid, object value)
    {
        Index?.Reindex(uid, value);
        foreach (KeyValuePair<string, IndexLeaf> entry in children)
        {
            IndexLeaf child = entry.Value;
            child.Reindex(uid, child.extractValue(value));
        }
    }

    /// <summary>Remove <paramref name="uid"/> from this leaf and all children.</summary>
    public void Remove(Guid uid)
    {
        Index?.Remove(uid);
        foreach (KeyValuePair<string, IndexLeaf> entry in children)
            entry.Value.Remove(uid);
    }

    /// <summary>Clear this leaf's index and recursively all child indexes.</summary>
    public void Clear()
    {
        Index?.Clear();
        foreach (KeyValuePair<string, IndexLeaf> entry in children)
            entry.Value.Clear();
    }

    /// <summary>
    /// Match against this leaf's own index only; child leafs are not consulted.
    /// </summary>
    public void Match(Func<object, bool> criterion, ISet<Guid> matches) => Index?.Match(criterion, matches);
}
}

View File

@ -0,0 +1,57 @@
using ln.collections;
using System;
using System.Collections.Generic;
using System.Text;
namespace ln.objects.index
{
/// <summary>
/// A simple in-memory index: maps indexed values of type <typeparamref name="T"/>
/// to sets of object UIDs, with a reverse map (UID -> value) used to locate and
/// purge the old entry when an object is reindexed or removed.
/// </summary>
class SimpleIndex<T> : Index
{
    // value -> set of UIDs carrying that value
    BTreeValueSet<T, Guid> index = new BTreeValueSet<T, Guid>();
    // UID -> currently indexed value (needed to find the entry in 'index')
    BTree<Guid, T> reverseIndex = new BTree<Guid, T>();

    public SimpleIndex()
    {
    }

    public override void Clear()
    {
        index.Clear();
        reverseIndex.Clear();
    }

    public override void Match(Func<object, bool> criterion, ISet<Guid> matches)
    {
        foreach (T ivalue in index.Keys)
        {
            if (criterion(ivalue))
                matches.UnionWith(index[ivalue]);
        }
    }

    public override void Reindex(Guid uid, object value)
    {
        Remove(uid);
        T typedValue = (T)value;
        index.Add(typedValue, uid);
        // BUGFIX: the reverse map was never updated, so Remove() could not
        // find (and purge) the entry on a later Remove()/Reindex(), leaving
        // stale values in 'index'.
        reverseIndex.Add(uid, typedValue);
    }

    public override void Remove(Guid uid)
    {
        if (reverseIndex.ContainsKey(uid))
        {
            index.TryRemove(reverseIndex[uid], uid);
            reverseIndex.TryRemove(uid);
        }
    }

    public override bool TryDeserializeIndex(byte[] serializedIndex)
    {
        throw new NotImplementedException();
    }

    public override bool TrySerializeIndex(out byte[] serializedIndex)
    {
        throw new NotImplementedException();
    }
}
}

20
ln.objects.csproj 100644
View File

@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
</PropertyGroup>
<ItemGroup>
<Compile Remove="ng\**" />
<EmbeddedResource Remove="ng\**" />
<None Remove="ng\**" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\ln.collections\ln.collections.csproj" />
<ProjectReference Include="..\ln.json\ln.json.csproj" />
<ProjectReference Include="..\ln.logging\ln.logging.csproj" />
<ProjectReference Include="..\ln.type\ln.type.csproj" />
</ItemGroup>
</Project>

189
ng/Document.cs 100644
View File

@ -0,0 +1,189 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using ln.objects.catalog;
namespace ln.types.odb.ng
{
/// <summary>
/// A schema-less property container: maps ODB property names to ODB values and
/// (de)serializes itself to the binary layout
/// [16-byte GUID][int32 count][count x (name, value)]. A Document is identified
/// by its <see cref="ID"/>; ODBNull is never stored (assigning it removes the key).
/// </summary>
public class Document : ODBEntity
{
    public override ODBValue Identity => new ODBGuid(ID);

    private Dictionary<ODBEntity, ODBEntity> properties = new Dictionary<ODBEntity, ODBEntity>();

    public Document()
        : base(0x1001)
    {
        ID = Guid.NewGuid();
    }

    public Document(Guid id)
        : base(0x1001)
    {
        ID = id;
    }

    public Guid ID { get; }
    public DateTime StorageTimeStamp { get; set; }

    public Document(byte[] bytes)
        : this(bytes, 0, bytes.Length)
    { }

    /// <summary>Deserialize a Document from its storage bytes (see class summary for layout).</summary>
    /// <exception cref="FormatException">Thrown when deserialization read past the given length.</exception>
    public Document(byte[] bytes, int offset, int length)
        : this(new Guid(bytes.Slice(offset, 16)))
    {
        int endOffset = offset + length;
        offset += 16; // GUID already consumed by this(...)

        int nProps = BitConverter.ToInt32(bytes, offset);
        offset += 4;

        for (int n = 0; n < nProps; n++)
        {
            ODBEntity propName = ODBEntity.Deserialize(bytes, ref offset);
            ODBEntity propValue = ODBEntity.Deserialize(bytes, ref offset);
            properties.Add(propName, propValue);
        }

        if (offset > endOffset)
            throw new FormatException("Document deserialization read behind end of buffer");
    }

    /// <summary>
    /// Gets a property value (ODBNull.Instance for absent keys). Assigning
    /// ODBNull removes the key, so ODBNull is never stored.
    /// </summary>
    public ODBEntity this[ODBEntity propName]
    {
        get => properties.TryGetValue(propName, out ODBEntity value) ? value : ODBNull.Instance;
        set
        {
            if (ODBNull.Instance.Equals(value))
                properties.Remove(propName); // Remove() is a no-op for absent keys
            else
                properties[propName] = value;
        }
    }

    public ODBEntity this[string propName]
    {
        get => this[new ODBStringValue(propName)];
        set => this[new ODBStringValue(propName)] = value;
    }

    public IEnumerable<ODBEntity> Keys => properties.Keys;

    public bool Contains(string propName) => Contains(new ODBStringValue(propName));
    public bool Contains(ODBEntity propName)
    {
        return !ODBNull.Instance.Equals(this[propName]);
    }

    /// <summary>Deep-clone: same ID and timestamp, cloned property values.</summary>
    public override ODBEntity Clone()
    {
        Document clone = new Document(ID);
        clone.StorageTimeStamp = StorageTimeStamp;
        foreach (ODBEntity fieldName in properties.Keys)
        {
            clone[fieldName] = this[fieldName].Clone();
        }
        return clone;
    }

    /// <summary>Replace target's properties and timestamp with clones of this Document's (target keeps its own ID).</summary>
    public void CloneTo(Document target)
    {
        target.properties.Clear();
        target.StorageTimeStamp = StorageTimeStamp;
        foreach (ODBEntity fieldName in properties.Keys)
        {
            target[fieldName] = this[fieldName].Clone();
        }
    }

    public override byte[] GetStorageBytes()
    {
        // using declarations ensure the stream/writer are disposed (the
        // original leaked both); ToArray remains valid on a closed MemoryStream.
        using (MemoryStream stream = new MemoryStream())
        using (BinaryWriter writer = new BinaryWriter(stream))
        {
            writer.Write(ID.ToByteArray());
            writer.Write(properties.Count);

            foreach (ODBEntity propName in properties.Keys)
            {
                ODBEntity propValue = properties[propName];
                propName.Write(writer);
                propValue.Write(writer);
            }
            writer.Flush();
            return stream.ToArray();
        }
    }

    public override string ToString()
    {
        return String.Format("[Document ID={0} {1}]", ID.ToString(), String.Join(" ", properties.Select(kv => String.Format("{0}={1}", kv.Key, kv.Value))));
    }

    public override string ToTreeString(int indent)
    {
        indent += 2;

        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.AppendFormat("{0} Identity={1} Count={2}", GetType().Name, Identity, properties.Count);
        // NOTE(review): iterating keys as ODBValue assumes all keys are
        // ODBValue instances — confirm no plain ODBEntity key is ever used.
        foreach (ODBValue key in properties.Keys)
        {
            stringBuilder.AppendLine();
            stringBuilder.AppendFormat("{0}{1,-32}: {2}", new String(' ', indent), key, properties[key].ToTreeString(indent));
        }
        return stringBuilder.ToString();
    }

    public override int GetHashCode()
    {
        return ID.GetHashCode();
    }

    public override bool Equals(object obj)
    {
        // Null-safe: the original dereferenced obj.GetType() unconditionally
        // and threw NullReferenceException for Equals(null).
        return (obj is Document you) && Equals(GetType(), obj.GetType()) && ID.Equals(you.ID);
    }

    /// <summary>Order Documents by pairwise comparison of the union of their property values.</summary>
    protected override int compare(ODBEntity e)
    {
        // NOTE(review): assumes e is a Document — a non-Document argument
        // would throw NullReferenceException below; confirm callers guarantee this.
        Document other = e as Document;
        ODBEntity[] keys = Keys.Union(other.Keys).ToArray();

        foreach (ODBEntity key in keys)
        {
            ODBEntity mine = this[key];
            ODBEntity yours = other[key];

            int c = mine.CompareTo(yours);
            if (c != 0)
                return c;
        }
        return 0;
    }

    public override T As<T>() => (T)Mapper.Default.UnmapValue(typeof(T), this);

    static Document()
    {
        // Register the binary deserializer for this storage type code (0x1001).
        RegisterDeserializer(0x1001, (b, o, l) => new Document(b, o, l));
    }
}
}

10
ng/IODBMapping.cs 100644
View File

@ -0,0 +1,10 @@
using System;
using ln.objects.catalog;
namespace ln.types.odb.ng
{
/// <summary>
/// A bidirectional mapping between native CLR values and ODB entities,
/// used by <see cref="Mapper"/> to (un)map values per type.
/// </summary>
public interface IODBMapping
{
    /// <summary>Map a native value to its ODB representation.</summary>
    ODBEntity MapValue(Mapper mapper, object value);

    /// <summary>Map an ODB entity back to a native value.</summary>
    object UnmapValue(Mapper mapper, ODBEntity oval);
}
}

View File

@ -0,0 +1,56 @@
using System;
using ln.types.collections;
namespace ln.types.odb.ng
{
/// <summary>
/// A thread-safe, weakly-referenced bidirectional cache between object
/// identities (Guid) and object instances. Entries disappear automatically
/// when the cached objects are garbage-collected.
/// </summary>
public class IdentityCache
{
    // Private gate instead of lock(this): external code could otherwise
    // lock on the cache instance and interfere with internal locking.
    private readonly object gate = new object();

    WeakKeyReferenceDictionary<object, Guid> reverseCache = new WeakKeyReferenceDictionary<object, Guid>();
    WeakValueDictionary<Guid, object> forwardCache = new WeakValueDictionary<Guid, object>();

    public IdentityCache()
    {
    }

    /// <summary>Try to resolve a cached instance for <paramref name="identity"/>.</summary>
    public bool TryGetValue(Guid identity, out object o)
    {
        lock (gate)
        {
            return forwardCache.TryGetValue(identity, out o);
        }
    }

    /// <summary>
    /// Resolve a cached instance for <paramref name="identity"/>, creating and
    /// caching one via <paramref name="instantiator"/> on a miss.
    /// </summary>
    /// <returns>true: <paramref name="o"/> is always valid on return.</returns>
    public bool TryGetValue(Guid identity, out object o, Func<object> instantiator)
    {
        lock (gate)
        {
            if (!TryGetValue(identity, out o))
            {
                o = instantiator();
                forwardCache.Add(identity, o);
                reverseCache.Add(o, identity);
            }
            // BUGFIX: the original returned false unconditionally — even on a
            // cache hit — contradicting the TryGetValue contract; o is always
            // set (found or freshly instantiated), so report success.
            return true;
        }
    }

    /// <summary>Try to resolve the identity of a cached instance.</summary>
    public bool TryGetIdentity(object o, out Guid identity)
    {
        lock (gate)
        {
            return reverseCache.TryGetValue(o, out identity);
        }
    }

    /// <summary>Ensure both directions of the cache contain the given pair (no overwrite of existing entries).</summary>
    public void Ensure(Guid identity, object o)
    {
        lock (gate)
        {
            if (!forwardCache.ContainsKey(identity))
                forwardCache.Add(identity, o);
            if (!reverseCache.ContainsKey(o))
                reverseCache.Add(o, identity);
        }
    }
}
}

220
ng/Mapper.API.cs 100644
View File

@ -0,0 +1,220 @@
using ln.types.btree;
using System.Collections.Generic;
using System;
using ln.types.odb.ng.index;
using System.Linq;
using System.Collections;
using ln.types.odb.ng.storage;
using ln.types.odb.ng.mappings;
using ln.objects.catalog;
namespace ln.types.odb.ng
{
/// <summary>
/// API half of the Mapper: loading, saving, deleting and querying mapped
/// objects. Maintains a bidirectional cache between document IDs and live
/// object instances so the same document always maps to the same instance.
/// </summary>
public partial class Mapper
{
    // documentID -> cached (document, instance) pair
    BTree<Guid, CachedObject> forwardCache = new BTree<Guid, CachedObject>();
    // instance -> cached (document, instance) pair
    Dictionary<object, CachedObject> reverseCache = new Dictionary<object, CachedObject>();

    /// <summary>Lazily load all objects of type T from storage.</summary>
    public IEnumerable<T> Load<T>() => Load(typeof(T), false).Cast<T>();
    public IEnumerable<T> Load<T>(bool refresh) => Load(typeof(T), refresh).Cast<T>();
    public IEnumerable Load(Type type) => Load(type, false);
    public IEnumerable Load(Type type, bool refresh)
    {
        // Deferred enumeration: objects are mapped one by one while iterating.
        return new MappedObjectEnumeration(this, type, GetDocumentIDs(type), refresh);
    }

    public object[] LoadArray(Type type) => LoadArray(type, false);
    /// <summary>Eagerly load all objects of the given type into an array.</summary>
    public object[] LoadArray(Type type, bool refresh)
    {
        MappedObjectEnumeration mappedObjectEnumeration = new MappedObjectEnumeration(this, type, GetDocumentIDs(type), refresh);
        object[] objects = new object[mappedObjectEnumeration.Count];
        int n = 0;
        foreach (object o in mappedObjectEnumeration)
            objects[n++] = o;
        return objects;
    }

    public T Load<T>(Guid documentID) => (T)Load(typeof(T), documentID, false);
    public T Load<T>(Guid documentID, bool refresh) => (T)Load(typeof(T), documentID, refresh);
    public object Load(Type type, Guid documentID) => Load(type, documentID, false);
    /// <summary>
    /// Load a single object by document ID. Cache hits return the existing
    /// instance (optionally refreshed from storage); misses unmap the stored
    /// document and populate both cache directions.
    /// </summary>
    public object Load(Type type, Guid documentID, bool refresh)
    {
        lock (this)
        {
            if (forwardCache.ContainsKey(documentID))
            {
                CachedObject cachedObject = forwardCache[documentID];
                if (refresh)
                    Refresh(type, cachedObject.Instance);
                return cachedObject.Instance;
            }
            else
            {
                IStorage storage = StorageContainer.GetStorage(type.FullName);
                Document document = storage.Load(documentID);
                object instance = ObjectMapping.UnmapValue(this, document);
                CachedObject cachedObject = new CachedObject(document, instance);

                forwardCache.Add(cachedObject.Document.ID, cachedObject);
                reverseCache.Add(cachedObject.Instance, cachedObject);

                return cachedObject.Instance;
            }
        }
    }

    public IEnumerable<T> Load<T>(Query query) => Load(typeof(T), query, false).Cast<T>();
    public IEnumerable<T> Load<T>(Query query, bool refresh) => Load(typeof(T), query, refresh).Cast<T>();
    public IEnumerable Load(Type type, Query query) => Load(type, query, false);
    public IEnumerable Load(Type type, Query query, bool refresh)
    {
        IEnumerable<Guid> matchedIDs = GetDocumentIDs(type, query);
        return new MappedObjectEnumeration(this, type, matchedIDs, refresh);
    }

    public bool Refresh<T>(T instance) => Refresh(typeof(T), instance);
    /// <summary>
    /// Re-read the backing document from storage and re-apply it to the cached
    /// instance. Returns false when the instance is not cached.
    /// NOTE(review): unlike Load/Save/Delete this method does not take the
    /// lock — confirm whether concurrent use is expected.
    /// </summary>
    public bool Refresh(Type type, object instance)
    {
        if (!reverseCache.TryGetValue(instance, out CachedObject cachedObject))
            return false;

        IStorage storage = StorageContainer.GetStorage(type.FullName);
        if (storage.Refresh(cachedObject.Document))
        {
            (GetMapping(type) as ClassMapping).Apply(this, cachedObject.Document, cachedObject.Instance);
            return true;
        }
        return false;
    }

    public void Save<T>(T instance) => Save(typeof(T), instance);
    /// <summary>
    /// Persist an instance: cached instances keep their document ID, new
    /// instances get a fresh Guid and are added to both cache directions.
    /// </summary>
    public void Save(Type type, object instance)
    {
        lock (this)
        {
            IStorage storage = GetStorage(type);
            CachedObject cachedObject;
            Document document;

            if (reverseCache.ContainsKey(instance))
            {
                cachedObject = reverseCache[instance];
                document = (GetMapping(type) as mappings.ClassMapping).MapDocument(this, cachedObject.Document.ID, instance);
                storage.Save(document);
                cachedObject.Document = document;
            }
            else
            {
                document = (GetMapping(type) as mappings.ClassMapping).MapDocument(this, Guid.NewGuid(), instance) as Document;
                cachedObject = new CachedObject(document, instance);
                storage.Save(document);
                forwardCache.Add(cachedObject.Document.ID, cachedObject);
                reverseCache.Add(instance, cachedObject);
            }
        }
    }

    public void Delete<T>(T instance) => Delete(typeof(T), instance);
    /// <summary>Delete a cached instance's document from storage and evict it from both caches.</summary>
    public void Delete(Type type, object instance)
    {
        lock (this)
        {
            if (reverseCache.ContainsKey(instance))
            {
                CachedObject cachedObject = reverseCache[instance];
                reverseCache.Remove(instance);
                forwardCache.Remove(cachedObject.Document.ID);
                GetStorage(type).Delete(cachedObject.Document.ID);
            }
        }
    }

    public void Delete<T>(Guid documentID) => Delete(typeof(T), documentID);
    /// <summary>Delete a document by ID; evicts any cached instance first, then deletes from storage unconditionally.</summary>
    public void Delete(Type type, Guid documentID)
    {
        lock (this)
        {
            if (forwardCache.ContainsKey(documentID))
            {
                CachedObject cachedObject = forwardCache[documentID];
                reverseCache.Remove(cachedObject.Instance);
                forwardCache.Remove(cachedObject.Document.ID);
            }
            GetStorage(type).Delete(documentID);
        }
    }

    public IEnumerable<Guid> GetDocumentIDs<T>() => GetDocumentIDs(typeof(T));
    /// <summary>All document IDs stored for the given type.</summary>
    public IEnumerable<Guid> GetDocumentIDs(Type type)
    {
        IStorage storage = StorageContainer.GetStorage(type.FullName);
        return storage.GetDocumentIDs();
    }

    public IEnumerable<Guid> GetDocumentIDs<T>(string path, Predicate<ODBEntity> predicate) => GetDocumentIDs(typeof(T), path, predicate);
    /// <summary>Document IDs whose value at the given index path satisfies the predicate.</summary>
    public IEnumerable<Guid> GetDocumentIDs(Type type, string path, Predicate<ODBEntity> predicate)
    {
        IStorage storage = StorageContainer.GetStorage(type.FullName);
        return storage.GetDocumentIDs(path, predicate);
    }

    public IEnumerable<Guid> GetDocumentIDs<T>(Query query) => GetDocumentIDs(typeof(T), query);
    /// <summary>Document IDs matched by executing the query against the type's storage.</summary>
    public IEnumerable<Guid> GetDocumentIDs(Type type, Query query)
    {
        IStorage storage = StorageContainer.GetStorage(type.FullName);
        return query.Execute(storage);
    }

    public void EnsureIndex<T>(string path) => EnsureIndex(typeof(T), path);
    /// <summary>Ensure an index exists for the given property path (translated to an index path first).</summary>
    public void EnsureIndex(Type type, string path)
    {
        path = IndexPath.TranslatePropertyPath(type, path);
        IStorage storage = StorageContainer.GetStorage(type.FullName);
        storage.EnsureIndex(path);
    }

    // A (document, instance) pair held by both cache directions.
    struct CachedObject
    {
        public object Instance;
        public Document Document;

        public CachedObject(Document document, object instance)
        {
            Document = document;
            Instance = instance;
        }
    }

    // Deferred enumeration that maps document IDs to instances on iteration.
    class MappedObjectEnumeration : IEnumerable
    {
        Mapper mapper;
        Type type;
        IEnumerable<Guid> documentIDs;
        bool refresh;

        // NOTE(review): Count() re-enumerates documentIDs on every call.
        public int Count => documentIDs.Count();

        public MappedObjectEnumeration(Mapper mapper, Type type, IEnumerable<Guid> documentIDs, bool refresh)
        {
            this.mapper = mapper;
            this.type = type;
            this.documentIDs = documentIDs;
            this.refresh = refresh;
        }

        public IEnumerator GetEnumerator()
        {
            foreach (Guid documentID in documentIDs)
                yield return mapper.Load(type, documentID, refresh);
        }
    }
}
}

302
ng/Mapper.cs 100644
View File

@ -0,0 +1,302 @@
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Linq;
using System.Collections;
using ln.types.net;
using ln.types.odb.ng.storage;
using ln.collections;
using ln.types.odb.ng.mappings;
using ln.types.odb.ng.storage.fs;
using ln.objects.catalog;
namespace ln.types.odb.ng
{
// Untyped map/unmap delegates between native values and ODB entities.
public delegate ODBEntity ODBMap(Mapper mapper, object value);
public delegate object ODBUnmap(Mapper mapper, ODBEntity oval);

// Strongly-typed variants of the map/unmap delegates.
public delegate ODBEntity ODBMap<T>(Mapper mapper, T value);
public delegate T ODBUnmap<T>(Mapper mapper, ODBEntity oval);

/// <summary>
/// Core half of the Mapper: owns the storage container and the per-type
/// registry of <see cref="IODBMapping"/>s, registers built-in mappings for
/// primitives in the constructor, and derives mappings for collections,
/// enums and classes on demand in GetMapping().
/// </summary>
public partial class Mapper : IDisposable
{
    // Process-wide default mapper; the first mapper constructed with a
    // storage container takes over this slot (see constructor).
    public static Mapper Default { get; set; } = new Mapper((IStorageContainer)null);

    public IStorageContainer StorageContainer { get; private set; }
    public IdentityCache IdentityCache { get; } = new IdentityCache();

    // native type -> registered mapping (guarded by lock(mappings))
    Dictionary<Type, IODBMapping> mappings = new Dictionary<Type, IODBMapping>();
    // Fallback mapping registered for typeof(object).
    mappings.ObjectMapping ObjectMapping { get; }

    public Mapper(string basePath)
        : this(new FSStorageContainer(basePath))
    {
        StorageContainer.Open();
    }

    public Mapper(IStorageContainer storageContainer)
    {
        // Promote this instance to Default when the current Default has no
        // storage container (covers the initial placeholder instance).
        if (Default?.StorageContainer == null)
            Default = this;

        this.StorageContainer = storageContainer;

        // Built-in primitive mappings follow.
        RegisterMapping<string>(
            (mapper, value) => new ODBStringValue(value),
            (mapper, oval) => oval.As<String>()
        );
        RegisterMapping<int>(
            (mapper, value) => new ODBInteger(value),
            (mapper, oval) => oval.As<int>()
        );
        RegisterMapping<short>(
            (mapper, value) => new ODBInteger(value),
            (mapper, oval) => oval.As<short>()
        );
        RegisterMapping<byte>(
            (mapper, value) => new ODBInteger(value),
            (mapper, oval) => oval.As<Byte>()
        );
        RegisterMapping<uint>(
            (mapper, value) => new ODBUInteger(value),
            (mapper, oval) => oval.As<uint>()
        );
        RegisterMapping<ushort>(
            (mapper, value) => new ODBUInteger(value),
            (mapper, oval) => oval.As<ushort>()
        );
        RegisterMapping<char>(
            (mapper, value) => new ODBUInteger(value),
            (mapper, oval) => oval.As<Char>()
        );
        RegisterMapping<double>(
            (mapper, value) => new ODBDouble(value),
            (mapper, oval) => oval.As<double>()
        );
        RegisterMapping<float>(
            (mapper, value) => new ODBDouble(value),
            (mapper, oval) => oval.As<float>()
        );
        // DateTime is stored as Unix milliseconds (UTC); DateTime.MinValue maps to 0.
        RegisterMapping<DateTime>(
            (mapper, value) => new ODBLong(DateTime.MinValue.Equals(value) ? 0 : new DateTimeOffset(value.ToUniversalTime()).ToUnixTimeMilliseconds()),
            (mapper, oval) => DateTimeOffset.FromUnixTimeMilliseconds(oval.As<long>()).DateTime
        );
        // TimeSpan is stored as total milliseconds (double).
        RegisterMapping<TimeSpan>(
            (mapper, value) => new ODBDouble(value.TotalMilliseconds),
            (mapper, oval) => TimeSpan.FromMilliseconds(oval.As<double>())
        );
        RegisterMapping<Guid>(
            (mapper, value) => new ODBGuid(value),
            (mapper, oval) => oval.As<Guid>()
        );
        RegisterMapping<long>(
            (mapper, value) => new ODBLong(value),
            (mapper, oval) => oval.As<long>()
        );
        RegisterMapping<ulong>(
            (mapper, value) => new ODBULong(value),
            (mapper, oval) => oval.As<ulong>()
        );
        RegisterMapping<bool>(
            (mapper, value) => (bool)value ? ODBBool.True : ODBBool.False,
            (mapper, oval) => oval.As<bool>()
        );
        RegisterMapping<IPv4>(
            (mapper, value) => new ODBUInteger(value.AsUInt),
            (mapper, oval) => new IPv4(oval.As<uint>())
        );
        RegisterMapping<IPv6>(
            (mapper, value) => new ODBByteBuffer(value.ToCIDRBytes()),
            (mapper, oval) => new IPv6((oval.As<byte[]>()))
        );

        // Generic object fallback, registered last for typeof(object).
        ObjectMapping = new mappings.ObjectMapping();
        RegisterMapping(typeof(object), ObjectMapping);
    }

    /// <summary>Register (or replace) the mapping for a native type.</summary>
    public void RegisterMapping(Type nativeType, IODBMapping mapping)
    {
        lock (mappings)
        {
            mappings[nativeType] = mapping;
        }
    }

    /// <summary>Register a mapping from a pair of untyped delegates.</summary>
    public void RegisterMapping(Type nativeType, ODBMap map, ODBUnmap unmap)
    {
        lock (mappings)
        {
            mappings[nativeType] = new mappings.SimpleMapping(map, unmap);
        }
    }

    /// <summary>Register a mapping from a pair of typed delegates.</summary>
    public void RegisterMapping<T>(ODBMap<T> map, ODBUnmap<T> unmap)
    {
        lock (mappings)
        {
            mappings[typeof(T)] = new mappings.SimpleMapping(
                (mapper, value) => map(mapper, (T)value),
                (mapper, value) => unmap(mapper, value)
            );
        }
    }

    public IStorage GetStorage(Type type) => GetStorage(type, type.FullName);
    /// <summary>Get the storage for a type name, opening it on first use.</summary>
    public IStorage GetStorage(Type type, string typeName)
    {
        IStorage storage = StorageContainer.GetStorage(typeName);
        if (!storage.IsOpen)
            storage.Open();
        return storage;
    }

    public IODBMapping GetMapping<T>() => GetMapping(typeof(T));
    /// <summary>
    /// Resolve the mapping for a type, deriving and caching one on demand:
    /// dictionaries, lists/sets/arrays, enums, and finally a ClassMapping for
    /// any non-primitive type. Returns null for unmappable types.
    /// </summary>
    public IODBMapping GetMapping(Type type)
    {
        lock (this.mappings)
        {
            if (type == null)
                throw new ArgumentNullException();

            if (mappings.ContainsKey(type))
                return mappings[type];

            // Defensive: string is normally registered in the constructor and
            // handled by the lookup above.
            if (typeof(string).Equals(type))
            {
                return null;
            }
            else if (type.IsGenericType && (type.GetGenericTypeDefinition().Equals(typeof(Dictionary<,>))))
            {
                mappings.Add(type, new mappings.DictionaryMapping());
                return mappings[type];
            }
            else if (type.IsGenericType && (type.GetGenericTypeDefinition().Equals(typeof(List<>)) || type.GetGenericTypeDefinition().Equals(typeof(HashSet<>))))
            {
                mappings.Add(type, new mappings.ListMapping(type));
                return mappings[type];
            }
            else if (type.GetInterfaces().Contains(typeof(IDictionary)))
            {
                mappings.Add(type, new mappings.DictionaryMapping());
                return mappings[type];
            }
            else if (type.IsArray)
            {
                mappings.Add(type, new mappings.ListMapping(type));
                return mappings[type];
            }
            else if (type.IsEnum)
            {
                // Enums are stored by name, not by numeric value.
                mappings.Add(type, new SimpleMapping(
                    (mapper, value) => new ODBStringValue(Enum.GetName(type, value)),
                    (mapper, oval) => Enum.Parse(type, (oval as ODBStringValue).Value as String)
                ));
                return mappings[type];
            }
            else if (!type.IsPrimitive)
            {
                mappings.Add(type, new mappings.ClassMapping(type));
                return mappings[type];
            }
            return null;
        }
    }

    //public object GetDocumentID(object o)
    //{
    //    IODBMapping mapping = GetMapping(o.GetType());
    //    if (mapping is mappings.ClassMapping)
    //    {
    //        mappings.ClassMapping classMapping = mapping as mappings.ClassMapping;
    //        return classMapping.getID(o);
    //    }
    //    return null;
    //}
    //public Type GetDocumentIDType(Type type)
    //{
    //    IODBMapping mapping = GetMapping(type);
    //    if (mapping is mappings.ClassMapping)
    //    {
    //        mappings.ClassMapping classMapping = mapping as mappings.ClassMapping;
    //        return classMapping.IDType;
    //    }
    //    return null;
    //}

    /// <summary>Map a native value to its ODB representation (null maps to ODBNull).</summary>
    /// <exception cref="NotSupportedException">Thrown when no mapping exists for the value's type.</exception>
    public virtual ODBEntity MapValue(object value)
    {
        if (value == null)
            return ODBNull.Instance;

        IODBMapping mapping = GetMapping(value.GetType());
        if (mapping != null)
            return mapping.MapValue(this, value);

        throw new NotSupportedException(String.Format("Can't map {0} ({1})", value.GetType(), value));
    }

    /// <summary>
    /// Map an ODB entity back to a native value. For Documents, the concrete
    /// target type is resolved from the embedded __asm__/__type__ properties
    /// when present.
    /// </summary>
    public virtual object UnmapValue(Type targetType, ODBEntity value)
    {
        if (ODBNull.Instance.Equals(value))
            return null;

        if (value is Document)
        {
            Document doc = value as Document;
            String asmname = doc["__asm__"].As<string>();
            String typename = doc["__type__"].As<string>();
            if (typename != null)
                targetType = Type.GetType(String.Format("{0}, {1}", typename, asmname)); //Assembly.Load(asmname).GetType(typename);
        }
        //else if (value is ODBTypedValue)
        //{
        //    ODBTypedValue typedValue = value as ODBTypedValue;
        //    targetType = typedValue.TargetType;
        //}

        IODBMapping mapping = GetMapping(targetType);
        if (mapping != null)
            return mapping.UnmapValue(this, value);

        return value.As(targetType);
    }

    /// <summary>
    /// Try to convert a value to the target type via a user-defined implicit
    /// conversion operator on the source type; returns the value unchanged
    /// when no matching operator exists.
    /// </summary>
    public virtual object TryImplicitMapping(object value, Type targetType)
    {
        Type sourceType = value.GetType();
        foreach (MethodInfo mop in sourceType.GetMethods(BindingFlags.Static | BindingFlags.Public))
        {
            if (mop.Name.Equals("op_Implicit") &&
                (mop.ReturnType.Equals(targetType)))
                if (
                    (sourceType.Equals(mop.GetParameters().FirstOrDefault().ParameterType))
                )
                {
                    return mop.Invoke(null, new object[] { value });
                }
        }
        return value;
    }

    public void Dispose()
    { }

    /// <summary>Assembly-qualified-style type name ("FullName, AssemblyName"); null for null input.</summary>
    public static String GetTypeName(Type type)
    {
        if (type == null)
            return null;
        return string.Format("{0}, {1}", type.FullName, type.Assembly.GetName().Name);
    }
}
}

View File

@ -0,0 +1,307 @@
using System;
using System.Collections;
using ln.types.odb.values;
using System.Collections.Generic;
using System.Linq;
namespace ln.types.odb.ng
{
//public class ObjectCollection : IEnumerable
//{
// public ODBMapper ODBMapper { get; }
// public ODBCollection DocumentCollection { get; private set; }
// public Type ElementType { get; }
// public String CollectionName => DocumentCollection.CollectionName;
// public int Count => DocumentCollection.Count;
// public Type IDType => ODBMapper.GetDocumentIDType(ElementType);
// internal ObjectCollection(ODBMapper odbmapper, Type elementType)
// : this(odbmapper, elementType, elementType.FullName)
// { }
// internal ObjectCollection(ODBMapper odbmapper, Type elementType, String collectionAlias)
// {
// ODBMapper = odbmapper;
// ElementType = elementType;
// DocumentCollection = ODBMapper.StorageContainer.GetCollection(elementType.FullName);
// }
// public object SelectByID(object ID)
// {
// ODBValue documentID = ODBMapper.MapValue(ID);
// return SelectByID(documentID);
// }
// public object SelectByID(ODBValue documentID)
// {
// if (ODBNull.Instance.Equals(documentID))
// return null;
// lock (this)
// {
// object o = GetCachedObject(documentID);
// if (object.ReferenceEquals(null, o))
// {
// ODBDocument document = DocumentCollection.GetDocumentByID(documentID);
// o = ODBMapper.UnmapValue(ElementType, document);
// TouchCache(documentID, o);
// }
// return o;
// }
// }
// public IEnumerable Select(Query query)
// {
// lock (this)
// {
// return new ObjectEnumeration(this, query.Execute(DocumentCollection).ToArray());
// }
// }
// public bool Ensure(object o)
// {
// if (!ElementType.IsInstanceOfType(o))
// throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o));
// lock (this)
// {
// ODBDocument document = ODBMapper.MapValue(o) as ODBDocument;
// if (DocumentCollection.Ensure(document))
// {
// TouchCache(document.ID, o);
// return true;
// }
// return false;
// }
// }
// public bool Insert(object o)
// {
// lock (this)
// {
// if (!ElementType.IsInstanceOfType(o))
// throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o));
// ODBDocument document = ODBMapper.MapValue(o) as ODBDocument;
// if (DocumentCollection.Insert(document))
// {
// TouchCache(document.ID, o);
// return true;
// }
// return false;
// }
// }
// public bool Update(object o)
// {
// lock (this)
// {
// lock (this)
// {
// if (!ElementType.IsInstanceOfType(o))
// throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o));
// ODBDocument document = ODBMapper.MapValue(o) as ODBDocument;
// if (DocumentCollection.Update(document))
// {
// TouchCache(document.ID, o);
// return true;
// }
// return false;
// }
// }
// }
// public bool Upsert(object o)
// {
// if (!ElementType.IsInstanceOfType(o))
// throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o));
// lock (this)
// {
// ODBDocument document = ODBMapper.MapValue(o) as ODBDocument;
// if (DocumentCollection.Upsert(document))
// {
// TouchCache(document.ID, o);
// return true;
// }
// return false;
// }
// }
// public bool Delete(object o) => Delete(ODBMapper.MapValue(ODBMapper.GetDocumentID(o)));
// public bool Delete(ODBValue documentID)
// {
// lock (this)
// {
// if (DocumentCollection.Delete(documentID))
// {
// if (objectCache.ContainsKey(documentID))
// objectCache.Remove(documentID);
// return true;
// }
// return false;
// }
// }
// public bool HasProperty(string propName)
// {
// propName = IndexPath.TranslatePropertyPath(ElementType, propName);
// ClassMapping classMapping = ODBMapper.GetMapping(ElementType) as ClassMapping;
// if (classMapping != null)
// {
// return classMapping.HasField(propName);
// }
// return false;
// }
// /* Indeces */
// public void EnsureIndex(string propertyPath, bool unique = false)
// {
// //string translatedPath = IndexPath.TranslatePropertyPath(ElementType, propertyPath);
// //DocumentCollection.EnsureIndex(translatedPath, translatedPath);
// EnsureIndeces(false, new string[] { propertyPath });
// }
// public void EnsureIndeces(params string[] propertyPaths) => EnsureIndeces(false, propertyPaths);
// public void EnsureIndeces(bool unique, params string[] propertyPaths)
// {
// for (int n = 0; n < propertyPaths.Length; n++)
// propertyPaths[n] = IndexPath.TranslatePropertyPath(ElementType, propertyPaths[n]);
// DocumentCollection.EnsureIndeces(propertyPaths, false);
// }
// public void EnsureUniqueness(params string[] propertyPaths)
// {
// for (int n = 0; n < propertyPaths.Length; n++)
// propertyPaths[n] = IndexPath.TranslatePropertyPath(ElementType, propertyPaths[n]);
// DocumentCollection.EnsureUniqueness(propertyPaths);
// }
// /* Object Cache */
// public bool UseStrongCache { get; private set; }
// Dictionary<ODBValue, object> objectCache = new Dictionary<ODBValue, object>();
// public void EnableStrongCache(bool enable)
// {
// lock (this)
// {
// if (!enable)
// {
// foreach (ODBValue key in objectCache.Keys.ToArray())
// {
// if (!(objectCache[key] is WeakReference))
// objectCache.Remove(key);
// }
// }
// else
// {
// foreach (ODBValue key in objectCache.Keys.ToArray())
// {
// if ((objectCache[key] is WeakReference))
// objectCache[key] = (objectCache[key] as WeakReference).Target;
// }
// }
// }
// }
// private object GetCachedObject(ODBValue documentID)
// {
// if (objectCache.ContainsKey(documentID))
// {
// object o = objectCache[documentID];
// if (o is WeakReference)
// {
// WeakReference weak = o as WeakReference;
// if (weak.IsAlive)
// return weak.Target;
// else
// return null;
// }
// return o;
// }
// return null;
// }
// private void TouchCache(ODBValue documentID, object o)
// {
// if (object.ReferenceEquals(o, null) && objectCache.ContainsKey(documentID))
// {
// objectCache.Remove(documentID);
// }
// else if (!object.ReferenceEquals(o, null))
// {
// if (UseStrongCache)
// objectCache[documentID] = o;
// else
// objectCache[documentID] = new WeakReference(o);
// }
// }
// public object[] GetDocumentIDs()
// {
// return DocumentCollection.Index.Select((arg) => ODBMapper.UnmapValue(IDType, arg)).ToArray();
// }
// public IEnumerable GetEnumeration()
// {
// lock (this)
// {
// return new ObjectEnumeration(this, DocumentCollection.Index.ToArray());
// }
// }
// public IEnumerator GetEnumerator()
// {
// return GetEnumeration().GetEnumerator();
// }
// public void Close()
// {
// DocumentCollection = null;
// }
// class ObjectEnumeration : IEnumerable
// {
// ObjectCollection collection;
// IEnumerable<ODBValue> documentIDs;
// public ObjectEnumeration(ObjectCollection collection,IEnumerable<ODBValue> documentIDs)
// {
// this.collection = collection;
// this.documentIDs = documentIDs;
// }
// public IEnumerator GetEnumerator()
// {
// foreach (ODBValue documentID in this.documentIDs)
// {
// yield return this.collection.SelectByID(documentID);
// }
// }
// }
//}
//public class ObjectCollection<T> : ObjectCollection where T:class
//{
// public ObjectCollection(ODBMapper odbmapper)
// :base(odbmapper,typeof(T))
// {}
// public IEnumerable<T> SelectQuery(Query query) => base.Select(query).Cast<T>();
// public T Select(object id) => (T)base.SelectByID(id);
// public bool Ensure(T o) => base.Ensure(o);
// public bool Insert(T o) => base.Insert(o);
// public bool Update(T o) => base.Update(o);
// public bool Upsert(T o) => base.Upsert(o);
// public void Delete(T o) => base.Delete(o);
//}
}

196
ng/Query.cs 100644
View File

@ -0,0 +1,196 @@
// /**
// * File: Query.cs
// * Author: haraldwolff
// *
// * This file and its content are copyrighted by the Author and / or copyright holder.
// * Any use without proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.Collections.Generic;
using ln.types.btree;
using System.Linq;
using ln.types.odb.ng.index;
using ln.types.odb.ng.storage;
using ln.objects.catalog;
namespace ln.types.odb.ng
{
/// <summary>
/// Composable query tree evaluated against an <see cref="IStorage"/>.
/// Instances are built exclusively through the static factory methods
/// (AND / OR / NOT / Equals / EqualsNot / Contains / IF) and executed with
/// <see cref="Execute(IStorage)"/>, which yields the IDs of matching documents.
/// </summary>
public abstract class Query
{
    // Private ctor: only the nested IMPL node classes can derive from Query.
    private Query()
    {
    }
    /// <summary>Set intersection: documents matched by <paramref name="query"/> AND every query in <paramref name="queries"/>.</summary>
    public static Query AND(Query query,params Query[] queries)
    {
        return new IMPL.AND(query, queries);
    }
    /// <summary>Set union: documents matched by any of the given queries.</summary>
    public static Query OR(Query query, params Query[] queries)
    {
        return new IMPL.OR(query, queries);
    }
    /// <summary>Set complement: all stored documents NOT matched by <paramref name="query"/>.</summary>
    public static Query NOT(Query query)
    {
        return new IMPL.NOT(query);
    }
    /// <summary>Equality test against a CLR value; the property path is translated for T and the value mapped via the default mapper.</summary>
    public static Query Equals<T>(string propertyName, object value) => Equals(IndexPath.TranslatePropertyPath(typeof(T), propertyName), Mapper.Default.MapValue(value));
    /// <summary>Equality test against an already-mapped value; null is normalized to ODBNull.</summary>
    public static Query Equals(String propertyName, ODBEntity value)
    {
        if (object.ReferenceEquals(value,null))
            value = ODBNull.Instance;
        return IF(propertyName, (v) => value.CompareTo(v)==0);
    }
    // NOTE(review): unlike Equals<T>(string, object), this generic overload takes an
    // ODBEntity directly and does not run the value through Mapper.Default — confirm intended.
    public static Query EqualsNot<T>(string propertyName, ODBEntity value) => EqualsNot(IndexPath.TranslatePropertyPath(typeof(T), propertyName), value);
    /// <summary>Inequality test; null is normalized to ODBNull.</summary>
    public static Query EqualsNot(String propertyName, ODBEntity value)
    {
        if (object.ReferenceEquals(value, null))
            value = ODBNull.Instance;
        return IF(propertyName, (v) => value.CompareTo(v)!=0);
    }
    /// <summary>Matches when the property equals any element of <paramref name="values"/> (linear scan per candidate value).</summary>
    public static Query Equals<T>(string propertyName, ODBEntity[] values) => Equals(IndexPath.TranslatePropertyPath(typeof(T), propertyName), values);
    public static Query Equals(String propertyName, ODBEntity[] values)
    {
        return IF(propertyName, (v) => {
            foreach (ODBEntity value in values)
                if (value.CompareTo(v) == 0)
                    return true;
            return false;
        });
    }
    /// <summary>Matches when the property value is contained in <paramref name="values"/>; the candidates are mapped once up front.</summary>
    public static Query Contains<T, A>(string propertyName, IEnumerable<A> values)
    {
        ODBEntity[] oValues = values.Select(v => Mapper.Default.MapValue(v)).ToArray();
        return IF(IndexPath.TranslatePropertyPath(typeof(T), propertyName), v => oValues.Contains(v));
    }
    /// <summary>Predicate query on a property path translated for T.</summary>
    public static Query IF<T>(string propertyName, Predicate<ODBEntity> predicate) => IF(IndexPath.TranslatePropertyPath(typeof(T),propertyName), predicate);
    /// <summary>Predicate query on a raw (already translated) property path.</summary>
    public static Query IF(string propertyName,Predicate<ODBEntity> predicate)
    {
        return new IMPL.IF(propertyName, predicate);
    }
    /// <summary>Evaluates this query node against <paramref name="storage"/> and returns matching document IDs.</summary>
    public abstract IEnumerable<Guid> Execute(IStorage storage);
    // Container for the concrete query node implementations.
    class IMPL
    {
        // Leaf node: asks the storage's index for the IDs whose property value satisfies the predicate.
        public class IF : Query
        {
            public string PropertyName { get; }
            public Predicate<ODBEntity> Predicate { get; }
            public IF(string propertyName,Predicate<ODBEntity> predicate)
            {
                PropertyName = propertyName;
                Predicate = predicate;
            }
            public override IEnumerable<Guid> Execute(IStorage storage)
            {
                return storage.GetDocumentIDs(PropertyName,Predicate);
            }
        }
        // Intersection node: iteratively narrows the result set of the first sub-query.
        public class AND : Query
        {
            public Query Query { get; }
            public Query[] Queries { get; }
            public AND(Query query, params Query[] queries)
            {
                Query = query;
                Queries = queries;
            }
            public override IEnumerable<Guid> Execute(IStorage storage)
            {
                // NOTE(review): AND seeds with AddRange while OR uses TryAddRange; if a
                // sub-query may yield duplicate IDs these behave differently — confirm
                // the BTree.AddRange duplicate-key semantics.
                BTree<Guid> firstSet = new BTree<Guid>();
                firstSet.AddRange(
                    Query.Execute(storage)
                );
                foreach (Query aQuery in Queries)
                {
                    if (firstSet.Count == 0)
                        return new Guid[0]; // short-circuit: an intersection can only shrink
                    BTree<Guid> nextSet = new BTree<Guid>();
                    foreach (Guid documentID in aQuery.Execute(storage))
                    {
                        if (firstSet.ContainsKey(documentID))
                            nextSet.Add(documentID);
                    }
                    firstSet = nextSet;
                }
                return firstSet.Keys;
            }
        }
        // Union node: collects IDs from all sub-queries, ignoring duplicates via TryAdd.
        public class OR : Query
        {
            public Query Query { get; }
            public Query[] Queries { get; }
            public OR(Query query, params Query[] queries)
            {
                Query = query;
                Queries = queries;
            }
            public override IEnumerable<Guid> Execute(IStorage storage)
            {
                BTree<Guid> result = new BTree<Guid>();
                result.TryAddRange(
                    Query.Execute(storage)
                );
                foreach (Query aQuery in Queries)
                {
                    foreach (Guid documentID in aQuery.Execute(storage))
                        result.TryAdd(documentID);
                }
                return result.Keys;
            }
        }
        // Complement node: enumerates ALL document IDs in storage, so cost is O(total documents).
        public class NOT : Query
        {
            public Query Query { get; }
            public NOT(Query query)
            {
                Query = query;
            }
            public override IEnumerable<Guid> Execute(IStorage storage)
            {
                BTree<Guid> notSet = new BTree<Guid>();
                BTree<Guid> result = new BTree<Guid>();
                notSet.AddRange(Query.Execute(storage));
                foreach (Guid documentID in storage.GetDocumentIDs())
                {
                    if (!notSet.ContainsKey(documentID))
                        result.Add(documentID);
                }
                return result.Keys;
            }
        }
    }
}
}

36
ng/Reference.cs 100644
View File

@ -0,0 +1,36 @@
using System;
namespace ln.types.odb.ng
{
//public class Reference<T> where T:class
//{
// ODBMapper mapper;
// ObjectCollection<T> Collection => mapper.GetCollection<T>();
// internal object valueID;
// public Reference(ODBMapper mapper)
// {
// this.mapper = mapper;
// }
// public Reference(ODBMapper mapper,T value)
// {
// this.mapper = mapper;
// this.Value = value;
// }
// public T Value
// {
// get
// {
// if (this.valueID == null)
// return null;
// return this.Collection.Select(valueID);
// }
// set
// {
// this.Collection.Ensure(value);
// this.valueID = this.mapper.GetDocumentID(value);
// }
// }
//}
}

57
ng/diff/Diff.cs 100644
View File

@ -0,0 +1,57 @@
using System;
using ln.objects.catalog;
namespace ln.types.odb.ng.diff
{
/// <summary>
/// Base of the diff tree: a Diff describes how to turn a source <c>ODBEntity</c> into a
/// destination entity via <see cref="Apply(ODBEntity)"/>. Use <see cref="Construct"/>
/// to obtain the appropriate diff kind for a pair of entities.
/// </summary>
public abstract class Diff
{
    /// <summary>Applies this diff to <paramref name="src"/> and returns the resulting entity.</summary>
    public abstract ODBEntity Apply(ODBEntity src);

    /// <summary>
    /// Builds the diff from <paramref name="src"/> to <paramref name="dst"/>:
    /// structural diffs for documents and lists of identical runtime type,
    /// a plain replacement otherwise.
    /// </summary>
    public static Diff Construct(ODBEntity src,ODBEntity dst)
    {
        if (src.GetType() != dst.GetType())
            return new SimpleDiff(dst);             // type changed: replace wholesale
        if (src is Document srcDocument)
            return new DocumentDiff(srcDocument, (Document)dst);
        if (src is ODBList srcList)
            return new ListDiff(srcList, (ODBList)dst);
        return new SimpleDiff(dst);                 // scalar value: replace wholesale
    }

    /// <summary>Human-readable tree rendering of this diff, starting at indent 0.</summary>
    public string TreeString => ToTreeString(0);
    public abstract string ToTreeString(int indent);

    // Replacement diff: Apply ignores the source and yields the stored destination value.
    class SimpleDiff : Diff
    {
        public ODBEntity DestinationValue { get; }

        public SimpleDiff(ODBEntity dst) => DestinationValue = dst;

        public override ODBEntity Apply(ODBEntity src) => DestinationValue;

        public override string ToString() => String.Format("[SimpleDiff DestinationValue={0}]",DestinationValue);

        public override string ToTreeString(int indent) => String.Format("= {0}", DestinationValue.ToTreeString(indent+2));
    }
}
}

View File

@ -0,0 +1,60 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using ln.objects.catalog;
namespace ln.types.odb.ng.diff
{
/// <summary>
/// Structural diff between two <see cref="Document"/> instances: records one child
/// diff per property whose value differs between source and destination.
/// </summary>
public class DocumentDiff : Diff
{
    // One child diff per changed property key.
    Dictionary<ODBEntity, Diff> propertyDiffs = new Dictionary<ODBEntity, Diff>();
    /// <summary>Computes per-property diffs over the union of both documents' key sets.</summary>
    public DocumentDiff(Document src, Document dst)
    {
        // Union of both key sets; keys present on only one side are still compared —
        // assumes the Document indexer yields a comparable placeholder (ODBNull?) for
        // missing keys — TODO confirm.
        HashSet<ODBEntity> keys = new HashSet<ODBEntity>(src.Keys);
        foreach (ODBEntity key in dst.Keys)
            keys.Add(key);
        foreach (ODBEntity key in keys)
        {
            if (src[key].CompareTo(dst[key]) != 0)
                propertyDiffs.Add(key, Diff.Construct(src[key], dst[key]));
        }
    }
    /// <summary>Applies all recorded property diffs in place to <paramref name="src"/> (must be a Document).</summary>
    public override ODBEntity Apply(ODBEntity src)
    {
        Document srcDocument = src as Document;
        // NOTE(review): keys are stored as ODBEntity but enumerated as ODBValue here —
        // this is a runtime downcast and throws InvalidCastException for a key that is
        // not an ODBValue; confirm all document keys are ODBValue.
        foreach (ODBValue key in propertyDiffs.Keys)
        {
            srcDocument[key] = propertyDiffs[key].Apply(srcDocument[key]);
        }
        return src;
    }
    public override string ToString()
    {
        return String.Format("[DocumentDiff ChangedProperties=({0})]",string.Join(",",propertyDiffs.Keys));
    }
    /// <summary>Multi-line rendering listing each changed property and its child diff.</summary>
    public override string ToTreeString(int indent)
    {
        indent += 2;
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.AppendFormat("DocumentDiff ChangedProperties=({0})",string.Join(",",propertyDiffs.Keys));
        foreach (ODBEntity key in propertyDiffs.Keys)
        {
            stringBuilder.AppendLine();
            stringBuilder.AppendFormat("{0}{1,-32} {2}", new string(' ', indent), key, propertyDiffs[key].ToTreeString(indent));
        }
        return stringBuilder.ToString();
    }
}
}

102
ng/diff/ListDiff.cs 100644
View File

@ -0,0 +1,102 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using ln.objects.catalog;
namespace ln.types.odb.ng.diff
{
/// <summary>
/// Structural diff between two <see cref="ODBList"/>s, keyed by element Identity:
/// elements only in the source are removed, elements only in the destination are
/// added (cloned), and elements present on both sides but unequal get a child diff.
/// </summary>
public class ListDiff : Diff
{
    // Identities of elements to remove from the source list.
    List<ODBValue> remove = new List<ODBValue>();
    // Elements to append on Apply (cloned so the diff stays reusable).
    List<ODBEntity> add = new List<ODBEntity>();
    // Child diff per identity for elements present on both sides but changed.
    Dictionary<ODBValue, Diff> update = new Dictionary<ODBValue, Diff>();
    public ListDiff(ODBList src,ODBList dst)
    {
        // Partition into src-only, dst-only and common elements via hash-set intersection
        // (relies on ODBEntity Equals/GetHashCode; CompareTo below is the finer check).
        HashSet<ODBEntity> srcItems = new HashSet<ODBEntity>(src);
        HashSet<ODBEntity> common = new HashSet<ODBEntity>(srcItems);
        HashSet<ODBEntity> dstItems = new HashSet<ODBEntity>(dst);
        HashSet<ODBEntity> commonDst = new HashSet<ODBEntity>(dstItems);
        common.IntersectWith(dstItems);
        commonDst.IntersectWith(srcItems);
        srcItems.ExceptWith(common);
        dstItems.ExceptWith(common);
        remove.AddRange(srcItems.Select((i) => i.Identity));
        add.AddRange(dstItems);
        // Pair up common elements by Identity and diff the ones whose content changed.
        Dictionary<ODBValue, ODBEntity> srcLookup = new Dictionary<ODBValue, ODBEntity>();
        foreach (ODBEntity entity in common)
            srcLookup.Add(entity.Identity, entity);
        foreach (ODBEntity entity in commonDst)
        {
            ODBValue identity = entity.Identity;
            ODBEntity srcEntity = srcLookup[identity];
            if (entity.CompareTo(srcEntity) != 0)
                update.Add(identity, Diff.Construct(srcEntity, entity));
        }
    }
    /// <summary>Applies removals, updates and additions in place to <paramref name="src"/> (must be an ODBList).</summary>
    public override ODBEntity Apply(ODBEntity src)
    {
        ODBList list = src as ODBList;
        // Iterate over a snapshot because the list is mutated while walking it.
        foreach (ODBEntity entity in list.ToArray())
        {
            ODBValue identity = entity.Identity;
            // NOTE(review): List.Contains is a linear scan per element (O(n*m)); fine for
            // small lists, consider a HashSet if lists grow large.
            if (remove.Contains(identity))
            {
                list.Remove(entity);
            } else if (update.ContainsKey(identity))
            {
                update[identity].Apply(entity);
            }
        }
        foreach (ODBEntity entity in add)
        {
            // Clone so repeated Apply calls never share element instances with the diff.
            list.Add(entity.Clone());
        }
        return src;
    }
    public override string ToString()
    {
        return base.ToString();
    }
    /// <summary>Multi-line rendering: '-' removals, ' ' updates, '+' additions.</summary>
    public override string ToTreeString(int indent)
    {
        indent += 2;
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.AppendFormat("ListDiff Remove={0} Change={1} Add={2}",remove.Count,update.Count,add.Count);
        foreach (ODBEntity key in remove)
        {
            stringBuilder.AppendLine();
            stringBuilder.AppendFormat("{0}-{1}", new string(' ', indent), key);
        }
        foreach (ODBValue key in update.Keys)
        {
            stringBuilder.AppendLine();
            stringBuilder.AppendFormat("{0} {1} {2}", new string(' ', indent), key, update[key].ToTreeString(indent));
        }
        foreach (ODBEntity key in add)
        {
            stringBuilder.AppendLine();
            stringBuilder.AppendFormat("{0}+{1}", new string(' ', indent), key.ToTreeString(indent));
        }
        return stringBuilder.ToString();
    }
}
}

34
ng/index/Index.cs 100644
View File

@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
using ln.objects.catalog;
namespace ln.types.odb.ng.index
{
/// <summary>
/// Base class for a single index over one property path: maps document IDs to their
/// indexed value(s) and answers predicate lookups. Implementations may persist
/// themselves via LoadIndex/SaveIndex.
/// </summary>
public abstract class Index
{
    // Flattened name of the indexed path (used e.g. as the on-disk index file name).
    public string IndexName { get; protected set; }
    // The property path this index covers.
    public Path IndexPath { get; protected set; }
    protected Index()
    {
    }
    protected Index(Path indexPath)
        :this()
    {
        IndexPath = indexPath;
        IndexName = IndexPath.Complete;
    }
    /// <summary>Re-indexes a document: drops its old entries, then adds the new value.</summary>
    public virtual void Replace(Guid documentID, ODBEntity value) {
        Remove(documentID);
        Add(documentID, value);
    }
    /// <summary>Adds an index entry for the document.</summary>
    public abstract void Add(Guid documentID, ODBEntity value);
    /// <summary>Removes all index entries of the document.</summary>
    public abstract void Remove(Guid documentID);
    /// <summary>Returns the IDs of all documents whose indexed value satisfies the predicate.</summary>
    public abstract IEnumerable<Guid> GetDocumentIDs(Predicate<ODBEntity> predicate);
    /// <summary>Loads the persisted index; false when missing or stale (caller must rebuild).</summary>
    public abstract bool LoadIndex(string basePath, long lastCloseTimestamp);
    /// <summary>Persists the index together with the given close timestamp.</summary>
    public abstract bool SaveIndex(string basePath, long lastCloseTimestamp);
}
}

View File

@ -0,0 +1,435 @@
using System;
using System.Collections.Generic;
using System.Collections;
using System.Reflection;
using System.Linq;
using System.Runtime.InteropServices.ComTypes;
using ln.objects.catalog;
namespace ln.types.odb.ng.index
{
/// <summary>
/// Node in a tree of index paths mirroring a property path expression:
/// DocumentPath branches on property names, ListPath descends into list elements
/// ("[]" segments), FinalPath terminates a path and owns the actual Index.
/// Also hosts the static helpers for parsing ("a.b[]") and translating CLR property
/// paths to field paths.
/// </summary>
public abstract class IndexPath
{
    /// <summary>Creates the node kind matching the head of <paramref name="path"/> (end → FinalPath, "[]" → ListPath, name → DocumentPath).</summary>
    public static IndexPath Build(Path path)
    {
        if (path.Element == null)
            return new FinalPath(path);
        if (path.Element.Equals("[]"))
            return new ListPath();
        return new DocumentPath();
    }
    /// <summary>All concrete indices reachable below this node.</summary>
    public abstract IEnumerable<Index> GetIndeces();
    /// <summary>Makes sure the sub-tree for <paramref name="path"/> exists; true when something new was created.</summary>
    public abstract bool Ensure(Path path);
    /// <summary>True when an index exists for <paramref name="path"/>.</summary>
    public abstract bool Indexed(Path path);
    /// <summary>Re-indexes a document: drop old entries, then add the new value.</summary>
    public virtual void Replace(Guid documentID, ODBEntity value)
    {
        Remove(documentID);
        Add(documentID, value);
    }
    public abstract void Add(Guid documentID, ODBEntity value);
    public abstract void Remove(Guid documentID);
    public abstract IEnumerable<Guid> GetDocumentIDs(Path path, Predicate<ODBEntity> predicate);
    /// <summary>Convenience overload: parses the string path first.</summary>
    public virtual IEnumerable<Guid> GetDocumentIDs(string path, Predicate<ODBEntity> predicate)
    {
        return GetDocumentIDs(SplitPath(path), predicate);
    }
    // Branch node keyed by property name; descends into the matching child per document property.
    public class DocumentPath : IndexPath
    {
        Dictionary<string, IndexPath> children = new Dictionary<string, IndexPath>();
        public DocumentPath()
        {
        }
        public override IEnumerable<Guid> GetDocumentIDs(Path path, Predicate<ODBEntity> predicate)
        {
            // Dictionary indexer: throws KeyNotFoundException when the path was never Ensure()d.
            return children[path.Element].GetDocumentIDs(path.Next, predicate);
        }
        public override IEnumerable<Index> GetIndeces()
        {
            return children.Values.SelectMany((arg) => arg.GetIndeces());
        }
        public override bool Indexed(Path path)
        {
            if (children.ContainsKey(path.Element))
                return children[path.Element].Indexed(path.Next);
            return false;
        }
        public override void Remove(Guid documentID)
        {
            foreach (IndexPath next in children.Values)
                next.Remove(documentID);
        }
        public override void Add(Guid documentID, ODBEntity value)
        {
            // value must be a Document here; each child indexes its named property.
            foreach (string childName in children.Keys)
            {
                children[childName].Add(documentID, (value as Document)[childName]);
            }
        }
        public override void Replace(Guid documentID, ODBEntity value)
        {
            foreach (string childName in children.Keys)
            {
                children[childName].Replace(documentID, (value as Document)[childName]);
            }
        }
        public override bool Ensure(Path path)
        {
            bool added = false;
            if (!children.ContainsKey(path.Element))
            {
                children.Add(path.Element, IndexPath.Build(path.Next));
                added = true;
            }
            // Recurse so deeper segments are created as well; report true if anything was new.
            return children[path.Element].Ensure(path.Next) || added;
        }
    }
    // List node ("[]" segment): fans each list element out to the single child path.
    class ListPath : IndexPath
    {
        IndexPath nextPath;
        public ListPath()
        {
        }
        public override IEnumerable<Index> GetIndeces() => nextPath.GetIndeces();
        public override bool Ensure(Path path)
        {
            bool added = false;
            if (nextPath == null)
            {
                nextPath = IndexPath.Build(path.Next);
                added = true;
            }
            return nextPath.Ensure(path.Next) || added;
        }
        public override bool Indexed(Path path)
        {
            return nextPath.Indexed(path.Next);
        }
        public override IEnumerable<Guid> GetDocumentIDs(Path path, Predicate<ODBEntity> predicate)
        {
            return nextPath.GetDocumentIDs(path.Next, predicate);
        }
        public override void Remove(Guid documentID)
        {
            nextPath.Remove(documentID);
        }
        public override void Add(Guid documentID, ODBEntity value)
        {
            // Null lists contribute no entries; otherwise index every element.
            if (!ODBNull.Instance.Equals(value))
                foreach (ODBEntity v in ((ODBList)value))
                {
                    nextPath.Add(documentID, v);
                }
        }
    }
    // Terminal node: owns the concrete Index receiving the values.
    class FinalPath : IndexPath
    {
        Path indexPath;
        Index index;
        public FinalPath(Path indexPath)
        {
            this.indexPath = indexPath;
        }
        public override IEnumerable<Index> GetIndeces() => new Index[] { index };
        public override bool Indexed(Path path) => true;
        public override IEnumerable<Guid> GetDocumentIDs(Path path, Predicate<ODBEntity> predicate)
        {
            return index.GetDocumentIDs(predicate);
        }
        public override void Remove(Guid documentID)
        {
            index.Remove(documentID);
        }
        public override void Add(Guid documentID, ODBEntity value)
        {
            index.Add(documentID, value);
        }
        public override void Replace(Guid documentID, ODBEntity value)
        {
            index.Replace(documentID, value);
        }
        public override bool Ensure(Path path)
        {
            // Path must be exhausted at a terminal node.
            if (path.Element != null)
                throw new NotSupportedException();
            if (index == null)
            {
                index = new SimpleIndex(indexPath);
                return true;
            }
            return false;
        }
    }
    /// <summary>
    /// Parses a dotted path with optional "[]" list suffixes (e.g. "a.b[].c") into a Path;
    /// each "[]" becomes its own path segment. Throws NotSupportedException on any other suffix.
    /// </summary>
    public static Path SplitPath(string path)
    {
        List<string> pathTokens = new List<string>();
        foreach (string primaryToken in path.Split('.'))
        {
            string suffix = "";
            string token = primaryToken;
            int i = token.IndexOf('[');
            if (i > 0)
            {
                suffix = token.Substring(i);
                token = token.Substring(0, i);
            }
            pathTokens.Add(token);
            string s = suffix;
            while (s.Length > 0)
            {
                if (s.StartsWith("[]", StringComparison.InvariantCulture))
                {
                    pathTokens.Add("[]");
                    s = s.Substring(2);
                }
                else
                {
                    throw new NotSupportedException();
                }
            }
        }
        return new Path(pathTokens.ToArray());
    }
    /// <summary>
    /// Translates a CLR property path ("Prop.Sub[]") into the underlying field path used
    /// by the document store: auto-properties are resolved to their compiler-generated
    /// backing fields, and "[]" steps into array/generic element types.
    /// </summary>
    public static string TranslatePropertyPath(Type elementType, string propPath)
    {
        String[] path = propPath.Split('.');
        Type currentType = elementType;
        for (int n = 0; n < path.Length; n++)
        {
            string suffix = "";
            string token = path[n];
            int i = token.IndexOf('[');
            if (i > 0)
            {
                suffix = token.Substring(i);
                token = token.Substring(0, i);
            }
            // Prefer a field of that name; otherwise fall back to the auto-property backing field.
            FieldInfo fieldInfo = currentType.GetField(token, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);
            if (fieldInfo == null)
            {
                string backingFieldName = String.Format("<{0}>k__BackingField", token);
                PropertyInfo propertyInfo = currentType.GetProperty(token, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
                if (propertyInfo != null)
                {
                    fieldInfo = currentType.GetField(backingFieldName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
                    if (fieldInfo == null)
                        throw new NotImplementedException("can't identify backing field for property");
                }
                else
                {
                    throw new ArgumentOutOfRangeException(nameof(path));
                }
            }
            token = fieldInfo.Name;
            currentType = fieldInfo.FieldType;
            string s = suffix;
            while (s.Length > 0)
            {
                if (s.StartsWith("[]", StringComparison.InvariantCulture))
                {
                    // Step into the element type for each "[]" suffix.
                    if (currentType.IsArray)
                        currentType = currentType.GetElementType();
                    else if (currentType.IsGenericType)
                        currentType = currentType.GetGenericArguments()[0];
                    else
                        throw new NotSupportedException();
                    s = s.Substring(2);
                }
                else
                {
                    throw new NotSupportedException();
                }
            }
            path[n] = String.Format("{0}{1}", token, suffix);
        }
        return String.Join(".", path);
    }
}
//class wait {
// protected Index Index;
// IndexPath NextIndexPath;
// protected IndexPath()
// {}
// protected IndexPath(Index index)
// {
// Index = index;
// }
// public IndexPath(Index index, Queue<string> path)
// :this(index)
// {
// NextIndexPath = CreateNextIndexPath(path);
// }
// private IndexPath CreateNextIndexPath(Queue<string> path)
// {
// if (path.Count == 0)
// {
// return new IndexUpdatePath(Index);
// }
// else
// {
// return new PropertyPath(propertyIndex, path);
// }
// }
// public virtual void UpdateIndex(ODBDocument document)
// {
// propertyIndex.TryRemove(document.ID);
// UpdateIndex(document, document);
// }
// public virtual void UpdateIndex(ODBDocument document, ODBValue element)
// {
// NextIndexPath.UpdateIndex(document, element);
// }
// public virtual IEnumerable<ODBValue> Retrieve(ODBValue element)
// {
// return NextIndexPath.Retrieve(element);
// }
// class PropertyPath : IndexPath
// {
// string propertyName;
// public PropertyPath(PropertyIndex propertyIndex,Queue<string> path)
// :base(propertyIndex)
// {
// propertyName = path.Dequeue();
// NextIndexPath = CreateNextIndexPath(path);
// while (propertyName.EndsWith("[]",StringComparison.InvariantCulture))
// {
// NextIndexPath = new EnumPath(propertyIndex, NextIndexPath);
// propertyName = propertyName.Substring(0, propertyName.Length - 2);
// }
// }
// public override void UpdateIndex(ODBDocument document, ODBValue element)
// {
// if (element is ODBDocument)
// {
// ODBDocument edoc = element as ODBDocument;
// NextIndexPath.UpdateIndex(document, edoc[propertyName]);
// }
// }
// public override IEnumerable<ODBValue> Retrieve(ODBValue element)
// {
// if (element is ODBDocument)
// {
// ODBDocument edoc = element as ODBDocument;
// return NextIndexPath.Retrieve(edoc[propertyName]);
// }
// return new ODBValue[0];
// }
// }
// class EnumPath : IndexPath
// {
// public EnumPath(PropertyIndex propertyIndex,IndexPath nextIndexPath)
// : base(propertyIndex)
// {
// NextIndexPath = nextIndexPath;
// }
// public override void UpdateIndex(ODBDocument document, ODBValue element)
// {
// if (element is ODBList)
// {
// ODBList elist = element as ODBList;
// foreach (ODBValue le in elist)
// {
// NextIndexPath.UpdateIndex(document, le);
// }
// }
// }
// public override IEnumerable<ODBValue> Retrieve(ODBValue element)
// {
// if (element is ODBList)
// {
// ODBList elist = element as ODBList;
// return elist.SelectMany(e => NextIndexPath.Retrieve(e));
// }
// return new ODBValue[0];
// }
// }
// class IndexUpdatePath : IndexPath
// {
// public IndexUpdatePath(PropertyIndex propertyIndex)
// : base(propertyIndex)
// {
// }
// public override void UpdateIndex(ODBDocument document, ODBValue element)
// {
// propertyIndex.Add(element, document.ID);
// }
// public override IEnumerable<ODBValue> Retrieve(ODBValue element)
// {
// return new ODBValue[] { element };
// }
// }
//}
}

66
ng/index/Path.cs 100644
View File

@ -0,0 +1,66 @@
using System;
using System.Collections.Generic;
using System.Collections;
using System.Linq;
using System.Text;
namespace ln.types.odb.ng.index
{
/// <summary>
/// Immutable, singly linked representation of a (property) path: each node holds one
/// path element and links to its successor (Next) and predecessor (Parent).
/// The chain is terminated by a node with Element == null.
/// </summary>
public class Path
{
    /// <summary>The element at this position, or null on the terminator node.</summary>
    public String Element { get; }
    /// <summary>The following path node, or null on the terminator node.</summary>
    public Path Next { get; }
    /// <summary>The preceding path node, or null at the root.</summary>
    public Path Parent { get; }
    private Path(Path parent,IEnumerator<string> path)
    {
        Parent = parent;
        if (path.MoveNext())
        {
            Element = path.Current;
            // Recursively consume the remaining enumerator elements into the chain.
            Next = new Path(this,path);
        }
        else
        {
            // Terminator node: marks the end of the chain.
            Element = null;
            Next = null;
        }
    }
    /// <summary>Builds a path chain from the given elements (an empty array yields just the terminator).</summary>
    public Path(String[] path)
        :this(null,((IEnumerable<string>)path).GetEnumerator())
    {
    }
    /// <summary>
    /// Flattened name of the whole path: elements joined with '_', except that "[]"
    /// segments are appended without a separator (e.g. ["a","b","[]","c"] -> "a_b[]_c").
    /// Returns the empty string for an empty path.
    /// </summary>
    public string Complete
    {
        get
        {
            List<string> path = new List<string>();
            Climb(path);
            // Bug fix: an empty path previously indexed path[0] and threw
            // ArgumentOutOfRangeException; return "" instead.
            if (path.Count == 0)
                return string.Empty;
            StringBuilder sb = new StringBuilder();
            sb.Append(path[0]);
            for (int n = 1; n < path.Count; n++)
            {
                if (!path[n].Equals("[]"))
                    sb.Append('_');
                sb.Append(path[n]);
            }
            return sb.ToString();
        }
    }
    // Walks up to the root, then collects the non-null elements top-down into path.
    private void Climb(List<string> path)
    {
        if (Parent != null)
            Parent.Climb(path);
        if (Element != null)
            path.Add(Element);
    }
}
}

View File

@ -0,0 +1,108 @@
using System;
using System.Collections.Generic;
using ln.types.btree;
using System.Linq;
using System.IO;
using ln.logging;
using ln.objects.catalog;
namespace ln.types.odb.ng.index
{
/// <summary>
/// In-memory index backed by two BTree multimaps: value -> document IDs for lookups
/// and document ID -> values for removal. Persists to a file named "IndexName.idx".
/// </summary>
public class SimpleIndex : Index
{
    // Forward map: indexed value -> IDs of all documents carrying that value.
    BTreeValueList<ODBEntity, Guid> valueIndex = new BTreeValueList<ODBEntity, Guid>();
    // Reverse map: document ID -> values it was indexed under (needed for Remove).
    BTreeValueList<Guid, ODBEntity> reverseIndex = new BTreeValueList<Guid, ODBEntity>();
    public SimpleIndex(Path path)
        : base(path)
    {
    }
    /// <summary>Scans all distinct indexed values; collects document IDs of values matching the predicate.</summary>
    public override IEnumerable<Guid> GetDocumentIDs(Predicate<ODBEntity> predicate)
    {
        HashSet<Guid> matchedIDs = new HashSet<Guid>();
        foreach (ODBEntity value in valueIndex.Keys)
        {
            if (predicate(value))
                foreach (Guid id in valueIndex[value])
                    matchedIDs.Add(id);
        }
        return matchedIDs;
    }
    /// <summary>Records the value for the document in both maps.</summary>
    public override void Add(Guid documentID, ODBEntity value)
    {
        valueIndex.Add(value, documentID);
        reverseIndex.Add(documentID, value);
    }
    /// <summary>Drops every entry of the document, using the reverse map to find its values.</summary>
    public override void Remove(Guid documentID)
    {
        if (reverseIndex.ContainsKey(documentID))
        {
            // Snapshot with ToArray: the value list is mutated indirectly while iterating.
            foreach (ODBEntity value in reverseIndex[documentID].ToArray())
            {
                valueIndex.Remove(value, documentID);
            }
            reverseIndex.Remove(documentID);
        }
    }
    /// <summary>
    /// Loads "IndexName.idx" from basePath. The index is only accepted when its stored
    /// LastCloseTimestamp equals <paramref name="lastCloseTimestamp"/>; a mismatch marks
    /// it stale and returns false (caller must rebuild).
    /// </summary>
    public override bool LoadIndex(string basePath, long lastCloseTimestamp)
    {
        if (File.Exists(System.IO.Path.Combine(basePath, String.Format("{0}.idx", IndexName))))
            using (FileStream fileStream = new FileStream(System.IO.Path.Combine(basePath, String.Format("{0}.idx", IndexName)), FileMode.Open))
            {
                byte[] indexBytes = fileStream.ReadBytes((int)fileStream.Length);
                Document indexDocument = new Document(indexBytes, 0, indexBytes.Length);
                long idxLastCloseTimestamp = indexDocument["LastCloseTimestamp"].As<long>();
                if (idxLastCloseTimestamp != lastCloseTimestamp)
                {
                    Logging.Log(LogLevel.WARNING, "Index timestamp {0} is not matching ( {1} != {2} )", IndexName, idxLastCloseTimestamp, lastCloseTimestamp);
                    return false;
                }
                else
                {
                    // Every ODBGuid key maps to the list of values that document was indexed under.
                    foreach (ODBEntity key in indexDocument.Keys)
                    {
                        if (key is ODBGuid)
                        {
                            Guid documentID = key.As<Guid>();
                            ODBList valueList = indexDocument[key] as ODBList;
                            foreach (ODBEntity value in valueList)
                            {
                                Add(documentID, value);
                            }
                        }
                    }
                    return true;
                }
            }
        return false;
    }
    /// <summary>Serializes the reverse map plus the close timestamp into "IndexName.idx" (overwriting).</summary>
    public override bool SaveIndex(string basePath, long lastCloseTimestamp)
    {
        Document indexDocument = new Document();
        indexDocument["LastCloseTimestamp"] = new ODBLong(lastCloseTimestamp);
        foreach (Guid documentID in reverseIndex.Keys)
        {
            ODBList valueList = new ODBList();
            valueList.AddRange(reverseIndex[documentID]);
            indexDocument[new ODBGuid(documentID)] = valueList;
        }
        byte[] indexBytes = indexDocument.GetStorageBytes();
        using (FileStream fileStream = new FileStream(System.IO.Path.Combine(basePath, String.Format("{0}.idx", IndexName)), FileMode.Create))
        {
            fileStream.WriteBytes(indexBytes);
            fileStream.Close();
        }
        return true;
    }
}
}

View File

@ -0,0 +1,225 @@
using System;
using System.Reflection;
using System.Collections.Generic;
using ln.logging;
using System.Linq;
using ln.types.collections;
using ln.objects.catalog;
namespace ln.types.odb.ng.mappings
{
/// <summary>
/// Field-level mapping between a CLR class (including inherited private fields) and an
/// ODB Document. Stores type metadata ("__asm__"/"__type__") per document and keeps
/// reference-type identity via the mapper's IdentityCache.
/// </summary>
public class ClassMapping : IODBMapping
{
    public delegate object GetID(object o);
    public delegate object SetID(object o, object id);
    /// <summary>The CLR type handled by this mapping.</summary>
    public Type MappedType { get; }
    // Value types are never identity-cached; only reference types round-trip by document ID.
    public bool IsReferenceType => !MappedType.IsValueType;
    // All fields (own + inherited, subject to filterFieldsHook) that get (de)serialized.
    List<FieldInfo> mappedFields = new List<FieldInfo>();
    // Optional factory used instead of Activator.CreateInstance when unmapping.
    Func<Mapper, Document, object> createObjectHook;
    // Optional predicate deciding which fields participate in the mapping.
    Func<FieldInfo, bool> filterFieldsHook;
    private ClassMapping() { }
    public ClassMapping(Type type) : this(type, null, null) { }
    public ClassMapping(Type type, Func<Mapper, Document, object> createObjectHook, Func<FieldInfo, bool> filterFieldsHook)
    {
        Logging.Log(LogLevel.DEBUG, "Constructing ClassMapping for {0}", type);
        MappedType = type;
        this.createObjectHook = createObjectHook;
        this.filterFieldsHook = filterFieldsHook;
        AddFields(type);
    }
    // Collects instance fields of type and recursively of its base classes
    // (GetFields only returns inherited public/protected members, not private ones).
    private void AddFields(Type type)
    {
        foreach (FieldInfo fieldinfo in type.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public))
        {
            if ((filterFieldsHook == null) || filterFieldsHook(fieldinfo))
                mappedFields.Add(fieldinfo);
        }
        // NOTE(review): the type != null check is dead — type was dereferenced above.
        if ((type != null) && !type.IsValueType && (!typeof(object).Equals(type.BaseType)))
        {
            AddFields(type.BaseType);
        }
    }
    /// <summary>True when a mapped field with the given name exists.</summary>
    public bool HasField(String name)
    {
        foreach (FieldInfo fieldInfo in mappedFields)
            if (fieldInfo.Name.Equals(name))
                return true;
        return false;
    }
    // Resolves the target instance for a document: identity cache first (reference types),
    // otherwise hook/Activator (non-public constructors allowed).
    private object GetObjectForDocument(Mapper mapper,Document document)
    {
        if (IsReferenceType)
        {
            if (mapper.IdentityCache.TryGetValue(document.ID, out object o, () => (createObjectHook != null) ? createObjectHook(mapper, document) : Activator.CreateInstance(MappedType, true) ))
                return o;
        }
        return (createObjectHook != null) ? createObjectHook(mapper,document) : Activator.CreateInstance(MappedType, true);
    }
    /// <summary>Unmaps a Document into a (possibly cached) instance and populates its fields.</summary>
    public object UnmapValue(Mapper mapper,ODBEntity oval)
    {
        Document document = oval as Document;
        object o = GetObjectForDocument(mapper,document);
        Apply(mapper, document, o);
        return o;
    }
    /// <summary>Copies every mapped field from the document onto <paramref name="o"/>, coercing types when needed.</summary>
    public void Apply(Mapper mapper,Document document,object o)
    {
        foreach (FieldInfo fieldInfo in mappedFields)
        {
            object fv = mapper.UnmapValue(fieldInfo.FieldType, document[fieldInfo.Name]);
            if (!object.ReferenceEquals(fv, null))
            {
                Type st = fv.GetType();
                // Two-stage coercion: mapper-level implicit mapping first, then Convert.ChangeType.
                if (st != fieldInfo.FieldType)
                {
                    fv = mapper.TryImplicitMapping(fv, fieldInfo.FieldType);
                    st = fv.GetType();
                }
                if (st != fieldInfo.FieldType)
                    fv = Convert.ChangeType(fv, fieldInfo.FieldType);
            }
            // Null values leave the field at its current/default value.
            if (!object.ReferenceEquals(null, fv))
                fieldInfo.SetValue(o, fv);
        }
    }
    /// <summary>Maps an instance into a Document under the given ID, embedding assembly/type metadata.</summary>
    public Document MapDocument(Mapper mapper,Guid documentID,object value)
    {
        Document document = new Document(documentID);
        document["__asm__"] = new ODBStringValue(value.GetType().Assembly.GetName().Name);
        document["__type__"] = new ODBStringValue(value.GetType().FullName);
        foreach (FieldInfo fieldInfo in mappedFields)
        {
            object fv = fieldInfo.GetValue(value);
            ODBEntity ov = null;
            ov = mapper.MapValue(fv);
            document[fieldInfo.Name] = ov;
        }
        if (IsReferenceType)
        {
            // Register the instance so later MapValue calls reuse the same document ID.
            mapper.IdentityCache.Ensure(documentID, value);
        }
        return document;
    }
    /// <summary>Maps a value to a Document, reusing its cached identity or minting a new Guid; null maps to ODBNull.</summary>
    public ODBEntity MapValue(Mapper mapper,object value)
    {
        if (Object.ReferenceEquals(value, null))
            return ODBNull.Instance;
        if (!mapper.IdentityCache.TryGetIdentity(value, out Guid documentID))
            documentID = Guid.NewGuid();
        return MapDocument(mapper, documentID, value);
    }
    /// <summary>Declared CLR type of a mapped field; throws KeyNotFoundException for unknown names.</summary>
    public Type GetFieldType(Mapper mapper,string fieldName)
    {
        foreach (FieldInfo fieldInfo in mappedFields)
            if (fieldInfo.Name.Equals(fieldName))
                return fieldInfo.FieldType;
        throw new KeyNotFoundException();
    }
}
/// <summary>
/// Fallback mapping for values declared as plain "object": unmapping inspects the stored
/// metadata / runtime shape to choose a concrete type.
/// </summary>
public class ObjectMapping : IODBMapping
{
    // NOTE(review): mapping any object through this path yields an EMPTY document,
    // i.e. no state is persisted — confirm this is an intentional placeholder.
    public ODBEntity MapValue(Mapper mapper, object value)
    {
        return new Document();
    }
    /// <summary>
    /// Unmaps an entity of unknown declared type: Documents are dispatched via their
    /// "__type__"/"__asm__" metadata, lists become arrays with an inferred element type,
    /// plain values unwrap to their CLR value.
    /// </summary>
    public object UnmapValue(Mapper mapper, ODBEntity oval)
    {
        if (oval is Document)
        {
            Document document = oval as Document;
            // Documents without type metadata degrade to a bare object.
            if (!document.Contains(new ODBStringValue("__type__")))
                return new object();
            Type dType = Type.GetType(String.Format("{0}, {1}",document["__type__"].As<String>(), document["__asm__"].As<String>()));
            return mapper.UnmapValue(dType, oval);
        } else if (oval is ODBList)
        {
            ODBList list = oval as ODBList;
            Array array = (Array)mapper.UnmapValue(typeof(object[]), list);
            if (array.Length > 0)
            {
                /* Magic: Try to find out about the arrays real element type */
                // Intersect the inheritance chains of all elements; after removing every
                // type that is a base of another remaining one, the most derived common
                // ancestor is left.
                HashSet<Type> eTypes = null;
                foreach (object v in array)
                {
                    HashSet<Type> myTypes = new HashSet<Type>();
                    Type eType = v?.GetType();
                    while (eType != null)
                    {
                        myTypes.Add(eType);
                        eType = eType.BaseType;
                    }
                    if (eTypes == null)
                    {
                        eTypes = myTypes;
                    }
                    else
                    {
                        eTypes.IntersectWith(myTypes);
                    }
                }
                // NOTE(review): a null element contributes an EMPTY chain, which empties
                // the intersection, so eTypes.First() below throws for arrays containing
                // null — confirm whether null elements can occur here.
                foreach (Type t in eTypes.ToArray())
                {
                    if (eTypes.Contains(t.BaseType))
                        eTypes.Remove(t.BaseType);
                }
                Type baseElementType = eTypes.First();
                if (!array.GetType().GetElementType().Equals(baseElementType))
                {
                    // Re-type the array when the inferred element type is more specific than object.
                    Array tarray = Array.CreateInstance(baseElementType, array.Length);
                    Array.Copy(array, tarray, array.Length);
                    array = tarray;
                }
            }
            return array;
        }
        else if (oval is ODBValue)
        {
            return (oval as ODBValue).Value;
        }
        throw new NotImplementedException();
    }
}
}

View File

@ -0,0 +1,84 @@
using System;
using System.Linq;
using System.Collections;
using System.Reflection;
using System.Collections.Generic;
using ln.objects.catalog;
namespace ln.types.odb.ng.mappings
{
/// <summary>
/// Maps IDictionary instances to ODB Documents and back. Keys become document keys;
/// the runtime types of keys and values are recorded in the "__ktypes__"/"__vtypes__"
/// sub-documents so heterogeneous dictionaries can round-trip.
/// </summary>
public class DictionaryMapping : IODBMapping
{
    public DictionaryMapping()
    {
    }
    /// <summary>Serializes the dictionary plus metadata ("__asm__", "__type__", "__ktypes__", "__vtypes__"); non-IDictionary values throw.</summary>
    public ODBEntity MapValue(Mapper mapper, object value)
    {
        Type dType = value.GetType();
        if (dType.GetInterfaces().Contains(typeof(IDictionary)))
        {
            IDictionary dictionary = value as IDictionary;
            Document document = new Document();
            document["__asm__"] = new ODBStringValue(value.GetType().Assembly.GetName().Name);
            document["__type__"] = new ODBStringValue(value.GetType().FullName);
            Document kTypes = new Document();
            Document vTypes = new Document();
            document["__ktypes__"] = kTypes;
            document["__vtypes__"] = vTypes;
            foreach (object key in dictionary.Keys)
            {
                object v = dictionary[key];
                ODBEntity okey = mapper.MapValue(key);
                document[okey] = mapper.MapValue(v);
                // Per-entry runtime types allow values more derived than the declared generics.
                kTypes[okey] = new ODBStringValue(Mapper.GetTypeName(key?.GetType()));
                vTypes[okey] = new ODBStringValue(Mapper.GetTypeName(v?.GetType()));
            }
            return document;
        }
        throw new NotImplementedException();
    }
    /// <summary>
    /// Reconstructs a generic Dictionary from a Document produced by MapValue. Keys whose
    /// string form both starts AND ends with "__" are treated as metadata and skipped.
    /// Only the open generic Dictionary type is supported; anything else throws.
    /// </summary>
    public object UnmapValue(Mapper mapper, ODBEntity oval)
    {
        Document document = oval as Document;
        Type dType = Type.GetType(String.Format("{0}, {1}",document["__type__"].As<String>(),document["__asm__"].As<String>())); //;Assembly.Load(document["__asm__"].AsString).GetType(document["__type__"].AsString);
        if (dType.IsGenericType)
        {
            IDictionary dictionary = (IDictionary)Activator.CreateInstance(dType, true);
            if (dType.GetGenericTypeDefinition().Equals(typeof(Dictionary<,>)))
            {
                // Declared generic arguments are the fallback when no per-entry type was recorded.
                Type kType = dType.GetGenericArguments()[0];
                Type vType = dType.GetGenericArguments()[1];
                Document ktypes = document.Contains("__ktypes__") ? document["__ktypes__"] as Document : new Document();
                Document vtypes = document.Contains("__vtypes__") ? document["__vtypes__"] as Document : new Document();
                foreach (ODBEntity key in document.Keys)
                {
                    // Assumes every key converts to string via As<String> — TODO confirm for non-string keys.
                    string skey = key.As<String>();
                    if (!skey.StartsWith("__", StringComparison.InvariantCulture) || !skey.EndsWith("__",StringComparison.InvariantCulture))
                    {
                        Type kt = ktypes.Contains(key) ? Type.GetType(ktypes[key].As<String>()) : kType;
                        Type vt = vtypes.Contains(key) ? Type.GetType(vtypes[key].As<String>()) : vType;
                        dictionary.Add(mapper.UnmapValue(kt, key), mapper.UnmapValue(vt, document[key]));
                    }
                }
                return dictionary;
            }
        }
        throw new NotSupportedException();
    }
}
}

View File

@ -0,0 +1,146 @@
// /**
// * File: ListMapping.cs
// * Author: haraldwolff
// *
// * This file and it's content is copyrighted by the Author and / or copyright holder.
// * Any use wihtout proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.Collections;
using System.Linq;
using System.Collections.Generic;
using System.Reflection;
using ln.objects.catalog;
namespace ln.types.odb.ng.mappings
{
/// <summary>
/// IODBMapping implementation for sequence types: arrays, <c>List&lt;T&gt;</c> and <c>HashSet&lt;T&gt;</c>.
/// Maps each element through the supplied Mapper into an ODBList and back.
/// </summary>
public class ListMapping : IODBMapping
{
    // The concrete CLR collection type this mapping produces/consumes (array, List<T> or HashSet<T>).
    public Type TargetType { get; }

    public ListMapping(Type targetType)
    {
        TargetType = targetType;
    }

    /// <summary>Unmaps an ODBList into a new array of TargetType's element type, element by element.</summary>
    private Array UnmapArray(Mapper mapper, ODBList list)
    {
        Array array = Array.CreateInstance(TargetType.GetElementType(), list.Count);
        for (int n = 0; n < list.Count; n++)
            array.SetValue(mapper.UnmapValue(TargetType.GetElementType(), list[n]), n);
        return array;
    }

    /// <summary>Maps each array element into a new ODBList.</summary>
    private ODBList MapArray(Mapper mapper, Array array)
    {
        ODBList list = new ODBList();
        for (int n = 0; n < array.Length; n++)
        {
            list.Add(mapper.MapValue(array.GetValue(n)));
        }
        return list;
    }

    /// <summary>
    /// Unmaps an ODBList into a new List&lt;T&gt; (TargetType), using TargetType's first generic
    /// argument as the element type. Requires TargetType to expose a (possibly non-public) parameterless constructor.
    /// </summary>
    public object UnmapList(Mapper mapper, ODBList list)
    {
        IList ilist = (IList)Activator.CreateInstance(TargetType, true);
        for (int n = 0; n < list.Count; n++)
        {
            ilist.Add(mapper.UnmapValue(TargetType.GetGenericArguments()[0], list[n]));
        }
        return ilist;
    }

    /// <summary>Maps an IList value into a new ODBList, element by element.</summary>
    public ODBList MapList(Mapper mapper, object value)
    {
        ODBList list = new ODBList();
        IList ilist = (IList)value;
        for (int n = 0; n < ilist.Count; n++)
            list.Add(mapper.MapValue(ilist[n]));
        return list;
    }

    /// <summary>
    /// Unmaps an ODBList into a new HashSet&lt;T&gt; (TargetType) by feeding a lazily-unmapping
    /// IEnumerable&lt;T&gt; into TargetType's IEnumerable constructor.
    /// </summary>
    public object UnmapSet(Mapper mapper, ODBList list)
    {
        Type entype = typeof(UnmappingEnumeration<>).MakeGenericType(TargetType.GetGenericArguments());
        object en = Activator.CreateInstance(entype, mapper, list);
        return Activator.CreateInstance(TargetType, en );
    }

    /// <summary>Maps any IEnumerable value (used for sets) into a new ODBList.</summary>
    public ODBList MapSet(Mapper mapper, object value)
    {
        ODBList list = new ODBList();
        IEnumerable ienum = (IEnumerable)value;
        foreach (object item in ienum)
            list.Add(mapper.MapValue(item));
        return list;
    }

    /// <summary>
    /// Dispatches unmapping by TargetType: array, List&lt;&gt; or HashSet&lt;&gt;.
    /// </summary>
    /// <exception cref="NotImplementedException">TargetType is none of the supported collection kinds.</exception>
    public object UnmapValue(Mapper mapper, ODBEntity oval)
    {
        if (TargetType.IsArray)
            return UnmapArray(mapper, (ODBList)oval);
        if (TargetType.IsGenericType)
        {
            if (typeof(List<>).Equals(TargetType.GetGenericTypeDefinition()))
            {
                return UnmapList(mapper, (ODBList)oval);
            }
            else if (typeof(HashSet<>).Equals(TargetType.GetGenericTypeDefinition()))
            {
                return UnmapSet(mapper, (ODBList)oval);
            }
        }
        throw new NotImplementedException();
    }

    /// <summary>
    /// Dispatches mapping by TargetType: array, List&lt;&gt; or HashSet&lt;&gt;.
    /// </summary>
    /// <exception cref="NotImplementedException">TargetType is none of the supported collection kinds.</exception>
    public ODBEntity MapValue(Mapper mapper, object value)
    {
        if (TargetType.IsArray)
            return MapArray(mapper, (Array)value);
        if (TargetType.IsGenericType)
        {
            if (typeof(List<>).Equals(TargetType.GetGenericTypeDefinition()))
            {
                return MapList(mapper, value);
            }
            else if (typeof(HashSet<>).Equals(TargetType.GetGenericTypeDefinition()))
            {
                return MapSet(mapper, value);
            }
        }
        throw new NotImplementedException();
    }

    /// <summary>
    /// Lazily unmaps ODBList entries to T as it is enumerated; used to feed
    /// collection constructors that accept IEnumerable&lt;T&gt; (see UnmapSet).
    /// </summary>
    class UnmappingEnumeration<T> : IEnumerable<T>
    {
        Type TargetType;    // element type, i.e. typeof(T)
        Mapper mapper;
        ODBList list;

        public UnmappingEnumeration(Mapper mapper,ODBList list)
        {
            TargetType = typeof(T);
            this.mapper = mapper;
            this.list = list;
        }

        public IEnumerator<T> GetEnumerator()
        {
            foreach (ODBEntity item in list)
                yield return (T)mapper.UnmapValue(TargetType, item);
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }
}
}

View File

@ -0,0 +1,45 @@
using System;
using ln.objects.catalog;
namespace ln.types.odb.ng.mappings
{
/// <summary>
/// Mapping that stores only a reference (the target's ID) instead of embedding the object,
/// and resolves it back through the referenced collection on unmap.
/// NOTE(review): this class uses ODBMapper/ODBValue in its method signatures while the other
/// mappings in this commit implement IODBMapping with Mapper/ODBEntity - verify it still
/// satisfies the interface, it looks like an older API revision.
/// </summary>
public class ReferenceMapping<T> : IODBMapping where T:class
{
    public Type ReferencedType { get; private set; }
    // Collection the referenced objects live in; defaults to the referenced type's full name.
    public String ReferencedCollectionName { get; set; }

    public ReferenceMapping()
    {
        ReferencedType = typeof(T);
        ReferencedCollectionName = ReferencedType.FullName;
    }

    /// <summary>
    /// Maps <paramref name="value"/> to its ID (via the referenced type's ClassMapping),
    /// or ODBNull for a null reference.
    /// </summary>
    /// <exception cref="NullReferenceException">No ClassMapping is registered for ReferencedType.</exception>
    public ODBValue MapValue(ODBMapper mapper, object value)
    {
        if (value == null)
        {
            return ODBNull.Instance;
        }
        else
        {
            ClassMapping classMapping = mapper.GetMapping(ReferencedType) as ClassMapping;
            if (classMapping == null)
                throw new NullReferenceException(String.Format("classMapping not found for Type {0}",ReferencedType));
            object referenceID = classMapping.getID(value);
            return mapper.MapValue(referenceID);
        }
    }

    /// <summary>
    /// Unmaps the stored ID and looks the referenced object up in the configured collection.
    /// </summary>
    /// <exception cref="NullReferenceException">No ClassMapping is registered for ReferencedType.</exception>
    public object UnmapValue(ODBMapper mapper, ODBValue oval)
    {
        ClassMapping classMapping = mapper.GetMapping(ReferencedType) as ClassMapping;
        if (classMapping == null)
            throw new NullReferenceException(String.Format("classMapping not found for Type {0}", ReferencedType));
        object referenceID = mapper.UnmapValue(classMapping.IDType, oval);
        T referenced = mapper.GetCollection<T>(ReferencedCollectionName).Select(referenceID);
        return referenced;
    }
}
}

View File

@ -0,0 +1,26 @@
using System;
using ln.objects.catalog;
namespace ln.types.odb.ng.mappings
{
/// <summary>
/// Delegate-backed IODBMapping: forwards MapValue/UnmapValue straight to the
/// two delegates supplied at construction time.
/// </summary>
public class SimpleMapping : IODBMapping
{
    readonly ODBMap mapDelegate;
    readonly ODBUnmap unmapDelegate;

    public SimpleMapping(ODBMap map, ODBUnmap unmap)
    {
        mapDelegate = map;
        unmapDelegate = unmap;
    }

    /// <summary>Maps <paramref name="value"/> by invoking the configured map delegate.</summary>
    public ODBEntity MapValue(Mapper mapper, object value) => mapDelegate(mapper, value);

    /// <summary>Unmaps <paramref name="oval"/> by invoking the configured unmap delegate.</summary>
    public object UnmapValue(Mapper mapper, ODBEntity oval) => unmapDelegate(mapper, oval);
}
}

View File

@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using ln.objects.catalog;
namespace ln.types.odb.ng.storage
{
/// <summary>
/// Abstraction over a document store keyed by Guid, with optional per-path indexing.
/// Implementations may or may not cache (see <see cref="IsCaching"/>).
/// </summary>
public interface IStorage : IDisposable
{
    /// <summary>Opens the storage. Returns <see langword="true"/> if it was actually opened by this call.</summary>
    bool Open();
    /// <summary>Closes the storage, flushing any pending state.</summary>
    void Close();
    /// <summary>Whether the storage is currently open.</summary>
    bool IsOpen { get; }

    /// <summary>
    /// Load the specified documentID.
    /// </summary>
    /// <returns>The loaded Document.</returns>
    /// <param name="documentID">Document identifier.</param>
    /// <remarks>
    /// Non caching storage will return the latest version of the document with each call.
    /// Caching storage will return the same Document instance with each call, as long as the document stayed in the cache.
    /// <b>No refreshing is done if a cached instance is returned!</b>
    /// </remarks>
    Document Load(Guid documentID);
    /// <summary>
    /// Refresh the specified document.
    /// </summary>
    /// <returns><see langword="true"/> if the document was refreshed successfully</returns>
    /// <param name="document">Document to be refreshed. Must have been loaded by a call to <c>Load(..)</c> of the same <c>IStorage</c> instance</param>
    /// <remarks>will reload the document from storage and apply the loaded state to this instance.
    /// If <paramref name="document"/> references an instance that has not been returned by a call to <c>Load(..)</c> (e.g. by using <c>Clone()</c>), internal caches may not be affected by the refresh.
    /// </remarks>
    bool Refresh(Document document);
    /// <summary>
    /// Save the specified document.
    /// </summary>
    /// <param name="document">Document to store</param>
    /// <remarks>Non caching storage will store the Document with its full state.
    /// Caching storage, e.g. SessionStorage, may only store changes to storage. Please refer to class specific documentation.
    /// </remarks>
    void Save(Document document);
    /// <summary>
    /// Delete the specified documentID from storage.
    /// </summary>
    /// <param name="documentID">Document identifier</param>
    /// <remarks>Will remove the Document identified by <para>documentID</para> from storage.
    /// </remarks>
    void Delete(Guid documentID);

    /// <summary>Acquires an exclusive lock on the storage; dispose the returned object to release it.</summary>
    IDisposable Lock();

    /// <summary>Whether a document with the given ID exists in this storage.</summary>
    bool Contains(Guid documentID);
    /// <summary>Enumerates the IDs of all stored documents.</summary>
    IEnumerable<Guid> GetDocumentIDs();
    /// <summary>Enumerates the IDs of documents whose value at <paramref name="path"/> satisfies <paramref name="predicate"/>.</summary>
    IEnumerable<Guid> GetDocumentIDs(string path,Predicate<ODBEntity> predicate);

    /// <summary>Returns the timestamp at which the given document was last written to storage.</summary>
    DateTime GetStorageTimestamp(Guid documentID);

    /// <summary>Ensures an index exists for the given document path segments.</summary>
    void EnsureIndex(params string[] path);

    /// <summary>Whether this implementation caches documents (affects Load semantics, see above).</summary>
    bool IsCaching { get; }
}
}

View File

@ -0,0 +1,23 @@
// /**
// * File: IStorageContainer.cs
// * Author: haraldwolff
// *
// * This file and it's content is copyrighted by the Author and / or copyright holder.
// * Any use wihtout proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.Collections.Generic;
namespace ln.types.odb.ng.storage
{
/// <summary>
/// A container managing multiple named <see cref="IStorage"/> instances
/// (e.g. one directory holding several storage files).
/// </summary>
public interface IStorageContainer : IDisposable
{
    /// <summary>Opens the container and returns itself for call chaining.</summary>
    IStorageContainer Open();
    /// <summary>Closes the container and all storages it manages.</summary>
    void Close();
    /// <summary>Whether the container is currently open.</summary>
    bool IsOpen { get; }

    /// <summary>Returns (creating/opening it if necessary) the storage with the given name.</summary>
    IStorage GetStorage(string storageName);
    /// <summary>Enumerates the names of the storages known to this container.</summary>
    IEnumerable<String> GetStorageNames();
}
}

View File

@ -0,0 +1,227 @@
using System;
using System.IO;
using ln.types.btree;
using System.Collections.Generic;
using System.Linq;
using ln.objects.catalog;
namespace ln.types.odb.ng.storage
{
/// <summary>
/// A file organized as a sequence of variable-sized areas, each prefixed with a 4-byte size.
/// Free (type code 0) and used areas are tracked in two BTrees; freed areas are reused
/// first-fit on subsequent stores. A companion ".lck" file guards against concurrent opens.
/// </summary>
public class OrganizedFile : IDisposable
{
    public String FileName { get; private set; }
    public OrganizedFileHeader FileHeader { get; private set; } = new OrganizedFileHeader();
    // Offset just past the last known area, i.e. where the next appended area starts.
    public int CurrentStoreLength { get; private set; }

    FileStream fileStream;
    FileStream lckFileStream;

    MappingBTree<int, OrganizedFileArea> freeAreas = new MappingBTree<int, OrganizedFileArea>((a)=>a.Offset);
    MappingBTree<int, OrganizedFileArea> usedAreas = new MappingBTree<int, OrganizedFileArea>((a) => a.Offset);

    /// <summary>
    /// Opens (creating if necessary) the organized file and scans all areas into the
    /// free/used trees. Throws if the ".lck" file already exists (another instance is active).
    /// </summary>
    public OrganizedFile(string filename)
    {
        FileName = filename;
        // CreateNew fails if the lock file exists - poor man's cross-process exclusivity.
        lckFileStream = new FileStream(string.Format("{0}.lck", FileName), FileMode.CreateNew);
        fileStream = new FileStream(filename, FileMode.OpenOrCreate, FileAccess.ReadWrite);

        if (fileStream.Length > 0)
        {
            FileHeader.Read(fileStream);

            // Walk the chain of areas from the first offset to the end of file.
            int nextOffset = FileHeader.FirstOffset;
            while (nextOffset < fileStream.Length)
            {
                OrganizedFileArea fileArea = new OrganizedFileArea(fileStream, nextOffset);
                if (fileArea.ReadTypeCode(fileStream) == 0)
                    freeAreas.Add(fileArea);
                else
                    usedAreas.Add(fileArea);
                nextOffset = fileArea.NextOffset;
            }
            CurrentStoreLength = nextOffset;
        }
    }

    /// <summary>
    /// Flushes the header, closes both streams, removes the lock file and clears the trees.
    /// Safe to call more than once.
    /// </summary>
    public void Close()
    {
        if (fileStream != null)
        {
            lock (fileStream)
            {
                FileHeader.Write(fileStream);
                fileStream.Close();
                fileStream.Dispose();
                lckFileStream.Close();
                lckFileStream.Dispose();
                File.Delete(string.Format("{0}.lck", FileName));

                freeAreas.Clear();
                usedAreas.Clear();
            }
            fileStream = null;
            lckFileStream = null;
        }
    }

    public IEnumerable<OrganizedFileArea> UsedAreas => usedAreas;

    /// <summary>Marks <paramref name="fileArea"/> as free: zeroes it on disk and moves it to the free tree.</summary>
    public void Clear(OrganizedFileArea fileArea)
    {
        lock (fileStream)
        {
            usedAreas.TryRemove(fileArea);
            fileArea.Clear(fileStream);
            freeAreas.Add(fileArea);
        }
    }

    /// <summary>Serializes <paramref name="value"/> and stores its bytes, returning the area used.</summary>
    public OrganizedFileArea Store(ODBEntity value)
    {
        byte[] storageBytes = value.GetStorageBytes();
        return Store(storageBytes);
    }
    /// <summary>
    /// Stores <paramref name="bytes"/>, reusing the first free area big enough or
    /// appending a new area at the end of the file.
    /// </summary>
    public OrganizedFileArea Store(byte[] bytes)
    {
        lock (fileStream)
        {
            // First-fit reuse of a freed area; otherwise append.
            OrganizedFileArea fileArea = freeAreas.Where((a) => a.MaxBytes >= bytes.Length).FirstOrDefault();
            if (fileArea == null)
            {
                fileArea = new OrganizedFileArea(CurrentStoreLength, bytes.Length + 4);
                CurrentStoreLength = fileArea.NextOffset;
            }
            else
            {
                freeAreas.Remove(fileArea);
            }
            fileArea.Store(fileStream, bytes);
            usedAreas.Add(fileArea);
            return fileArea;
        }
    }

    public void Dispose() => Close();

    /// <summary>
    /// Fixed-layout header at offset 0: magic, file type, offset of the first area,
    /// a reserved word and an open counter.
    /// </summary>
    public class OrganizedFileHeader
    {
        public readonly UInt32 CurrentFileMagic = 0x00EEFFC0;

        public UInt32 Magic;
        public OrganizedFileType FileType;
        public Int32 FirstOffset;
        public UInt32 Reserve0;
        public UInt64 OpenCounter;

        public OrganizedFileHeader()
        {
            Magic = CurrentFileMagic;
            FileType = OrganizedFileType.DATA;
            FirstOffset = 1024;
            Reserve0 = 0;
            OpenCounter = 0;
        }

        /// <summary>Reads the header from offset 0 and bumps the open counter.</summary>
        /// <exception cref="FormatException">Magic bytes do not match.</exception>
        public void Read(FileStream stream)
        {
            stream.Position = 0;
            Magic = stream.ReadUInteger();
            if (Magic != CurrentFileMagic)
                throw new FormatException("File Magic does not match! Possibly corrupted file!");
            FileType = (OrganizedFileType)stream.ReadInteger();
            FirstOffset = stream.ReadInteger();
            Reserve0 = stream.ReadUInteger();
            OpenCounter = stream.ReadULong();
            OpenCounter++;
        }
        /// <summary>Writes the header back to offset 0.</summary>
        public void Write(FileStream stream)
        {
            stream.Position = 0;
            stream.WriteUInteger(Magic);
            stream.WriteInteger((int)FileType);
            stream.WriteInteger(FirstOffset);
            stream.WriteUInteger(Reserve0);
            stream.WriteULong(OpenCounter);
        }
    }

    /// <summary>
    /// One area inside the file: 4-byte size prefix followed by Size-4 payload bytes.
    /// Ordered by Offset.
    /// </summary>
    public class OrganizedFileArea : IComparable<OrganizedFileArea>,IComparable
    {
        public Int32 Offset;
        public Int32 Size;

        public Int32 MaxBytes => Size - 4;     // payload capacity (size prefix excluded)
        public Int32 NextOffset => Offset + Size;

        public OrganizedFileArea(int offset,int size)
        {
            Offset = offset;
            Size = size;
        }
        /// <summary>Reads an area's size prefix from the file at <paramref name="offset"/>.</summary>
        public OrganizedFileArea(FileStream fileStream,int offset)
        {
            Offset = offset;
            fileStream.Position = Offset;
            Size = fileStream.ReadInteger();
        }

        /// <summary>Reads the first payload word; 0 marks a free area.</summary>
        public int ReadTypeCode(FileStream fileStream)
        {
            fileStream.Position = Offset + 4;
            return fileStream.ReadInteger();
        }

        /// <summary>Writes size prefix + buffer (zero-padded to MaxBytes) at this area's offset.</summary>
        public void Store(FileStream fileStream,byte[] buffer)
        {
            if (MaxBytes < buffer.Length)
                throw new ArgumentException("Area too small for buffer to fit", nameof(buffer));

            fileStream.Position = Offset;
            fileStream.WriteInteger(Size);
            fileStream.Write(buffer, 0, buffer.Length);
            if (buffer.Length < MaxBytes)
            {
                byte[] padding = new byte[MaxBytes - buffer.Length];
                fileStream.Write(padding, 0, padding.Length);
            }
        }

        /// <summary>Reads this area's full payload (MaxBytes) from the file.</summary>
        public byte[] Load(FileStream fileStream)
        {
            byte[] buffer = new byte[MaxBytes];
            fileStream.Position = Offset + 4;
            fileStream.Read(buffer, 0, buffer.Length);
            return buffer;
        }

        /// <summary>Zeroes the payload (making the type code 0 = free), keeping the size prefix.</summary>
        public void Clear(FileStream fileStream)
        {
            // BUGFIX: position the stream at this area before writing; previously the size
            // prefix and zero payload were written at whatever the current position happened to be.
            fileStream.Position = Offset;
            byte[] zero = new byte[MaxBytes];
            fileStream.WriteInteger(Size);
            fileStream.Write(zero,0,zero.Length);
        }

        public int CompareTo(OrganizedFileArea other)
        {
            return Offset - other.Offset;
        }
        public int CompareTo(object obj)
        {
            if (obj is OrganizedFileArea)
            {
                return Offset - (obj as OrganizedFileArea).Offset;
            }
            throw new NotSupportedException();
        }
    }
}
}

View File

@ -0,0 +1,9 @@
using System;
namespace ln.types.odb.ng.storage
{
/// <summary>Kind of content an OrganizedFile holds; stored in its header.</summary>
public enum OrganizedFileType : Int32
{
    DATA,
    INDEX
}
}

View File

@ -0,0 +1,308 @@
// /**
// * File: SegmentedFile.cs
// * Author: haraldwolff
// *
// * This file and it's content is copyrighted by the Author and / or copyright holder.
// * Any use wihtout proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ln.logging;
using ln.types.btree;
namespace ln.types.odb.ng.storage
{
/**
* SegmentedFile
* ----------
* 0000 4 MAGIC Bytes
* 0004 4 Version
* 0008 8 LastCloseTimestamp
* 0010 4 FirstOffset
* 0014 4 GranularWidth
* 0018 8 Reserved 0
*
**/
/// <summary>
/// File consisting of fixed-granularity segments, each with a 32-byte header
/// (size, Guid ID, timestamp) followed by payload. Segment sizes are rounded up
/// to 1 &lt;&lt; GranularWidth bytes. The file header records the last clean-close timestamp.
/// </summary>
public class SegmentedFile
{
    public static byte[] MagicBytes { get; } = new byte[] { 0x0F, 0x0E, 0x0D, 0x0A };

    public String FileName { get; }
    public int FileVersion { get; private set; }
    // Timestamp written on Close(); 0 while the file is open (used for crash detection).
    public long LastCloseTimestamp { get; private set; }
    public int FirstOffset { get; private set; }
    public int GranularWidth { get; private set; } = 12;
    public int GranularityMask => (1 << GranularWidth) - 1;
    // Offset just past the last scanned segment; where Append() places new segments.
    public int AppendOffset { get; private set; }

    public IEnumerable<Segment> Segments => segments;

    MappingBTree<long, Segment> segments = new MappingBTree<long, Segment>((s) => s.Offset);
    FileStream fileStream;

    public SegmentedFile(string fileName)
    {
        FileName = fileName;
    }
    public SegmentedFile(string fileName,int granularWidth)
        :this(fileName)
    {
        GranularWidth = granularWidth;
    }

    private void AssertOpen()
    {
        if (fileStream == null)
            throw new IOException("FSStorage not opened");
    }
    // Rounds i up to the next multiple of the granularity.
    private void CheckGranularity(ref int i){ i = (i + GranularityMask) & ~GranularityMask; }

    public bool IsOpen => (fileStream != null);

    /// <summary>
    /// Opens the file. A brand-new file is initialized via Close()+reopen; an existing file
    /// is validated, scanned for segments, and its last-close timestamp is zeroed to mark it open.
    /// </summary>
    /// <returns><see langword="true"/> if the file was opened by this call, <see langword="false"/> if it was already open or opening failed.</returns>
    public bool Open()
    {
        if (!IsOpen)
        {
            try
            {
                fileStream = new FileStream(FileName, FileMode.OpenOrCreate);
                if (fileStream.Length == 0)
                {
                    // Fresh file: seed the header fields, write them via Close(), then reopen.
                    FileVersion = 0;
                    LastCloseTimestamp = 0;
                    FirstOffset = (1 << GranularWidth);
                    if (FirstOffset < 0x20)
                        throw new NotSupportedException("Granularity too small");
                    AppendOffset = FirstOffset;

                    Close();
                    return Open();
                }
                else
                {
                    if (!fileStream.ReadBytes(4).SequenceEqual(MagicBytes))
                        throw new IOException("Magic bytes do not match");
                    FileVersion = fileStream.ReadInteger();
                    LastCloseTimestamp = fileStream.ReadLong();
                    FirstOffset = fileStream.ReadInteger();
                    GranularWidth = fileStream.ReadInteger();

                    Scan();

                    // Zero the close timestamp while open so a crash is detectable on next open.
                    fileStream.Position = 8;
                    fileStream.WriteLong(0);
                    fileStream.Flush();
                }
            }
            catch (Exception e)
            {
                Logging.Log(e);
                if (fileStream != null)
                {
                    fileStream.Close();
                    fileStream.Dispose();
                    fileStream = null;
                }
                return false;
            }
            return true;
        }
        return false;
    }

    /// <summary>Appends a segment holding <paramref name="payload"/>.</summary>
    public Segment Append(Guid id,byte[] payload) => Append(id, payload.Length, payload);
    /// <summary>
    /// Appends an empty segment with capacity for <paramref name="dataSize"/> payload bytes.
    /// BUGFIX: previously this overload called itself (infinite recursion / StackOverflowException);
    /// it now delegates to the three-argument overload with an empty payload.
    /// </summary>
    public Segment Append(Guid id, int dataSize) => Append(id, dataSize, new byte[0]);
    /// <summary>
    /// Appends a segment sized for <paramref name="dataSize"/> payload bytes (rounded up to
    /// granularity) and writes <paramref name="payload"/> into it.
    /// </summary>
    public Segment Append(Guid id, int dataSize, byte[] payload)
    {
        dataSize += Segment.HeaderSize;
        CheckGranularity(ref dataSize);

        Segment segment = new Segment(AppendOffset, dataSize) { ID = id, };
        Write(segment, payload);

        segments.Add(segment);
        AppendOffset = segment.NextOffset;

        return segment;
    }

    /// <summary>Merges adjacent segment <paramref name="b"/> into <paramref name="a"/>.</summary>
    /// <exception cref="ArgumentException">The segments are not adjacent.</exception>
    public Segment Join(Segment a,Segment b)
    {
        if (a.NextOffset != b.Offset)
            throw new ArgumentException("Segments to join are not siblings");
        a.Size += b.Size;
        WriteSegmentHead(a);
        segments.Remove(b);
        return a;
    }

    /// <summary>
    /// Splits the tail off <paramref name="segment"/> so that it keeps room for
    /// <paramref name="dataSize"/> payload bytes; returns the new tail segment, or null
    /// if the segment is not large enough to be worth splitting.
    /// </summary>
    public Segment Split(Segment segment,int dataSize)
    {
        int requestedSize = dataSize + Segment.HeaderSize;
        CheckGranularity(ref requestedSize);

        if (requestedSize < segment.Size)
        {
            Segment split = new Segment(segment.Offset + requestedSize,segment.Size - requestedSize);
            segment.Size = requestedSize;
            segments.Add(split);
            WriteSegmentHead(split);
            WriteSegmentHead(segment);
            return split;
        }
        return null;
    }

    /// <summary>Reads a segment's full payload.</summary>
    public byte[] Read(Segment segment)
    {
        fileStream.Position = segment.PayloadOffset;
        return fileStream.ReadBytes(segment.PayloadSize);
    }

    // Writes size + ID + timestamp at the segment's offset (28 of the 32 header bytes).
    private void WriteSegmentHead(Segment segment)
    {
        fileStream.Position = segment.Offset;
        fileStream.WriteInteger(segment.Size);
        fileStream.WriteBytes(segment.ID.ToByteArray());
        fileStream.WriteDouble(segment.TimeStamp.ToUnixTimeMilliseconds());
    }

    /// <summary>Writes header + payload (zero-padded to the segment's capacity), stamping TimeStamp with now.</summary>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="bytes"/> exceeds the segment's payload capacity.</exception>
    public void Write(Segment segment,byte[] bytes)
    {
        AssertOpen();
        if (bytes.Length > (segment.PayloadSize))
            throw new ArgumentOutOfRangeException(nameof(bytes));

        segment.TimeStamp = DateTime.Now;

        WriteSegmentHead(segment);
        fileStream.Position = segment.PayloadOffset;
        fileStream.WriteBytes(bytes);
        fileStream.WriteBytes(new byte[segment.PayloadSize - bytes.Length]);
    }

    /**
     * Position fileStream to offset, read Segment Header and construct a Segment instance to return
     **/
    private Segment ScanSegment(int offset)
    {
        fileStream.Position = offset;
        int size = fileStream.ReadInteger();
        byte[] id = fileStream.ReadBytes(16);
        double timestamp = fileStream.ReadDouble();
        return new Segment(offset, size, DateTimeExtensions.FromUnixTimeMilliseconds(timestamp)) { ID = new Guid(id), };
    }

    /**
     * Start at First Segment Offset and scan for all Segments in file
     **/
    private void Scan()
    {
        int offset = FirstOffset;
        Segment segment = null;
        while (offset < fileStream.Length)
        {
            segment = ScanSegment(offset);
            segments.Add(segment);
            offset = segment.NextOffset;
        }
        AppendOffset = offset;
    }

    /// <summary>Writes the file header (including the clean-close timestamp) and closes the stream.</summary>
    public void Close()
    {
        lock (this){
            AssertOpen();
            fileStream.Position = 0;
            fileStream.WriteBytes(MagicBytes);
            fileStream.WriteInteger(FileVersion);

            LastCloseTimestamp = (long)DateTime.Now.ToUnixTimeMilliseconds();
            fileStream.WriteLong(LastCloseTimestamp);
            fileStream.WriteInteger(FirstOffset);
            fileStream.WriteInteger(GranularWidth);

            fileStream.Close();
            fileStream.Dispose();
            fileStream = null;
        }
    }

    /// <summary>Flushes pending writes to disk.</summary>
    public void Sync()
    {
        lock (this)
        {
            fileStream.Flush();
        }
    }

    /// <summary>
    /// One segment: 32-byte header (4 size, 16 Guid, 8 timestamp, 4 spare) plus payload.
    /// </summary>
    public class Segment
    {
        public static readonly int HeaderSize = 32;

        public int Offset { get; }
        public int Size { get; set; }

        public int PayloadOffset => Offset + HeaderSize;
        public int PayloadSize => Size - HeaderSize;

        public Guid ID { get; set; }
        public DateTime TimeStamp { get; set; }

        public int NextOffset => Offset + Size;

        public Segment(int offset, int size)
        {
            Offset = offset;
            Size = size;
        }
        public Segment(int offset, int size,DateTime timestamp)
            :this(offset,size)
        {
            TimeStamp = timestamp;
        }

        /// <summary>Carves <paramref name="splitSize"/> bytes off the end of this segment into a new one.</summary>
        public Segment Split(int splitSize)
        {
            if (splitSize >= Size)
                throw new ArgumentOutOfRangeException(nameof(splitSize));
            Segment splitArea = new Segment(Offset + Size - splitSize, splitSize);
            Size -= splitSize;
            return splitArea;
        }

        public override string ToString()
        {
            return string.Format("[StorageArea Offset=0x{0:x8} Size=0x{1:x8}]", Offset, Size);
        }
    }
}
}

View File

@ -0,0 +1,45 @@
// /**
// * File: StorageArea.cs
// * Author: haraldwolff
// *
// * This file and it's content is copyrighted by the Author and / or copyright holder.
// * Any use wihtout proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
namespace ln.types.odb.ng.storage
{
/// <summary>
/// Describes one contiguous region inside a storage file: its byte offset and size,
/// plus the identity and timestamp of the document occupying it.
/// </summary>
public class StorageArea
{
    public int Offset { get; }
    public int Size { get; set; }

    public Guid ID { get; set; }
    public DateTime TimeStamp { get; set; }

    /// <summary>First offset after this area.</summary>
    public int NextOffset => Offset + Size;

    public StorageArea(int offset,int size)
    {
        Offset = offset;
        Size = size;
    }

    /// <summary>
    /// Carves <paramref name="splitSize"/> bytes off the end of this area and returns
    /// them as a new area; this instance shrinks by the same amount.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="splitSize"/> is not smaller than Size.</exception>
    public StorageArea Split(int splitSize)
    {
        if (splitSize >= Size)
            throw new ArgumentOutOfRangeException(nameof(splitSize));

        StorageArea tail = new StorageArea(NextOffset - splitSize, splitSize);
        Size -= splitSize;
        return tail;
    }

    public override string ToString() =>
        string.Format("[StorageArea Offset=0x{0:x8} Size=0x{1:x8}]", Offset, Size);
}
}

View File

@ -0,0 +1,79 @@
// /**
// * File: StorageAreaContainer.cs
// * Author: haraldwolff
// *
// * This file and it's content is copyrighted by the Author and / or copyright holder.
// * Any use wihtout proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using ln.types.btree;
using ln.logging;
namespace ln.types.odb.ng.storage
{
/// <summary>
/// Free-list of StorageAreas ordered by offset. Push() inserts an area and coalesces it
/// with adjacent free areas; Pop() hands out the first area of sufficient size (first-fit).
/// </summary>
public class StorageAreaContainer
{
    public bool DEBUG = false;

    // NOTE(review): SplitLimit is never read in this class - presumably intended to control
    // splitting of oversized areas on Pop(); verify against callers.
    public int SplitLimit { get; set; } = 32;

    MappingBTree<int, StorageArea> storageAreas = new MappingBTree<int, StorageArea>((value)=>value.Offset);

    public StorageAreaContainer()
    {
    }

    /// <summary>
    /// Adds <paramref name="storageArea"/> to the free list and merges it with its
    /// immediate predecessor/successor when they are contiguous.
    /// </summary>
    /// <returns>The (possibly merged) area now covering the pushed region.</returns>
    public StorageArea Push(StorageArea storageArea)
    {
        if (DEBUG)
            Logging.Log(LogLevel.DEBUG, "StorageAreaContainer: Push: {0}", storageArea);

        storageAreas.Add(storageArea);

        try
        {
            // Merge with the previous area if it ends exactly where we start.
            StorageArea previousStorageArea = storageAreas.Previous(storageArea);
            if ((previousStorageArea != null) && (previousStorageArea.NextOffset == storageArea.Offset))
            {
                previousStorageArea.Size += storageArea.Size;
                storageAreas.Remove(storageArea);
                storageArea = previousStorageArea;
            }
        } catch
        {
            // best-effort: Previous() may throw when no predecessor exists
        }

        try
        {
            // Merge with the next area if we end exactly where it starts.
            StorageArea nextStorageArea = storageAreas.Next(storageArea);
            if ((nextStorageArea != null) && (storageArea.NextOffset == nextStorageArea.Offset))
            {
                storageArea.Size += nextStorageArea.Size;
                storageAreas.Remove(nextStorageArea);
            }
        } catch
        { /* best-effort: Next() may throw when no successor exists */ }

        return storageArea;
    }

    /// <summary>
    /// Removes and returns the first (lowest-offset) free area of at least
    /// <paramref name="minSize"/> bytes, or null if none qualifies.
    /// </summary>
    public StorageArea Pop(int minSize)
    {
        foreach (StorageArea storageArea in storageAreas)
        {
            if (storageArea.Size >= minSize)
            {
                if (DEBUG)
                    Logging.Log(LogLevel.DEBUG, "StorageAreaContainer: Pop: {0}", storageArea);
                storageAreas.RemoveKey(storageArea.Offset);
                return storageArea;
            }
        }
        return null;
    }
}
}

View File

@ -0,0 +1,43 @@
using System;
using System.Collections.Generic;
using ln.objects.catalog;
using ln.types.threads;
namespace ln.types.odb.ng.storage.bases
{
/// <summary>
/// Decorator base class: forwards every IStorage operation to a wrapped storage,
/// so subclasses only override the operations they want to intercept.
/// </summary>
public abstract class ChainedStorage : IStorage
{
    /// <summary>The wrapped storage all calls are forwarded to; null after Dispose().</summary>
    public IStorage Storage { get; private set; }

    public bool IsOpen => Storage.IsOpen;

    public ChainedStorage(IStorage storage)
    {
        Storage = storage;
    }

    public virtual bool Open() => Storage.Open();
    public virtual void Close() => Storage.Close();

    public virtual Document Load(Guid documentID) => Storage.Load(documentID);
    public virtual void Save(Document document) => Storage.Save(document);
    public virtual void Delete(Guid documentID) => Storage.Delete(documentID);
    public virtual bool Contains(Guid documentID) => Storage.Contains(documentID);

    public virtual IEnumerable<Guid> GetDocumentIDs() => Storage.GetDocumentIDs();
    public virtual IEnumerable<Guid> GetDocumentIDs(string path, Predicate<ODBEntity> predicate) => Storage.GetDocumentIDs(path, predicate);

    public virtual DateTime GetStorageTimestamp(Guid documentID) => Storage.GetStorageTimestamp(documentID);
    public virtual void EnsureIndex(params string[] path) => Storage.EnsureIndex(path);
    public virtual bool Refresh(Document document) => Storage.Refresh(document);

    // NOTE(review): locks this wrapper, not the wrapped storage - confirm whether
    // delegating to Storage.Lock() was intended instead.
    public virtual IDisposable Lock() => new DisposableLock(this);

    /// <summary>
    /// Disposes the wrapped storage.
    /// BUGFIX: made idempotent - a second call previously threw NullReferenceException
    /// because Storage had already been set to null.
    /// </summary>
    public virtual void Dispose()
    {
        Storage?.Dispose();
        Storage = null;
    }

    public abstract bool IsCaching { get; }
}
}

View File

@ -0,0 +1,60 @@
using System;
using System.Collections.Generic;
using System.Linq;
using ln.types.btree;
using System.Threading;
using System.Runtime.ConstrainedExecution;
using ln.types.threads;
using ln.objects.catalog;
namespace ln.types.odb.ng.storage.bases
{
/// <summary>
/// Convenience base class for IStorage implementations: provides default implementations
/// of GetStorageTimestamp, Contains, Lock and Dispose on top of the abstract primitives.
/// </summary>
public abstract class StorageBase : IStorage
{
    // NOTE(review): this field is never used within this class and is private, so
    // subclasses cannot reach it either - looks like dead state; verify before removing.
    BTree<Guid> lockedDocuments = new BTree<Guid>();

    public StorageBase()
    {
    }

    public abstract bool Open();
    public abstract void Close();

    public abstract bool IsOpen { get; }

    public abstract Document Load(Guid documentID);
    public abstract void Save(Document document);
    public abstract void Delete(Guid documentID);

    /// <summary>
    /// Default implementation: loads the whole document and returns its storage timestamp.
    /// Subclasses with cheaper metadata access should override.
    /// </summary>
    public virtual DateTime GetStorageTimestamp(Guid documentID)
    {
        Document doc = Load(documentID);
        return doc.StorageTimeStamp;
    }

    /* Enumeration */
    public abstract IEnumerable<Guid> GetDocumentIDs();
    public abstract IEnumerable<Guid> GetDocumentIDs(string path, Predicate<ODBEntity> predicate);

    /* Indeces */
    public abstract void EnsureIndex(params string[] path);

    /// <summary>Closes the storage if it is still open.</summary>
    public virtual void Dispose()
    {
        if (IsOpen)
            Close();
    }

    /// <summary>Default O(n) containment check via full ID enumeration; override where an index exists.</summary>
    public virtual bool Contains(Guid documentID)
    {
        return GetDocumentIDs().Contains(documentID);
    }

    /// <summary>Locks this storage instance; dispose the returned handle to release.</summary>
    public virtual IDisposable Lock()
    {
        return new DisposableLock(this);
    }

    public abstract bool Refresh(Document document);

    public abstract bool IsCaching { get; }
}
}

View File

@ -0,0 +1,97 @@
using System;
using System.Collections.Generic;
using ln.types.odb.ng.storage.bases;
using ln.types.btree;
using ln.types.cache;
using NUnit.Framework;
using ln.types.threads;
using ln.objects.catalog;
namespace ln.types.odb.ng.storage.cache
{
/// <summary>
/// Decorates an IStorage with an in-memory Document cache of bounded size.
/// Load() serves cached instances when available (see IStorage.Load remarks:
/// cached instances are not refreshed).
/// </summary>
public class CachingStorage : StorageBase
{
    public int CacheSize => cache.Count;
    public int MaxCacheSize
    {
        get => cache.MaxCacheSize;
        set => cache.MaxCacheSize = value;
    }

    IStorage storage;
    Cache<Guid, Document> cache = new Cache<Guid, Document>();

    public CachingStorage(IStorage storage)
    {
        this.storage = storage;
    }

    public override bool IsOpen => storage.IsOpen;

    /// <summary>Clears the cache and closes the underlying storage.</summary>
    public override void Close()
    {
        if (IsOpen)
        {
            cache.Clear();
            storage.Close();
        }
    }

    /// <summary>Deletes from the underlying storage and evicts the cached instance.</summary>
    public override void Delete(Guid documentID)
    {
        lock (this)
        {
            storage.Delete(documentID);
            cache.Forget(documentID);
        }
    }

    public override void EnsureIndex(params string[] path) => storage.EnsureIndex(path);
    public override IEnumerable<Guid> GetDocumentIDs() => storage.GetDocumentIDs();
    public override IEnumerable<Guid> GetDocumentIDs(string path, Predicate<ODBEntity> predicate) => storage.GetDocumentIDs(path, predicate);

    /// <summary>
    /// Returns the cached instance when present, otherwise loads from the underlying
    /// storage and caches the result.
    /// </summary>
    /// <exception cref="KeyNotFoundException">The document does not exist in the underlying storage.</exception>
    public override Document Load(Guid documentID)
    {
        if (!Contains(documentID))
            throw new KeyNotFoundException();

        Document document = null;
        if (!cache.TryGet(documentID,out document))
            document = storage.Load(documentID);
        cache.Ensure(documentID, document);

        return document;
    }

    public override bool Open()
    {
        return storage.Open();
    }

    /// <summary>
    /// Saves to the underlying storage.
    /// NOTE(review): the cache is not updated here - if a different instance than the cached
    /// one is saved, subsequent Load() calls keep returning the stale cached instance; verify
    /// whether callers always save the instance obtained from Load().
    /// </summary>
    public override void Save(Document document)
    {
        lock (this)
        {
            storage.Save(document);
        }
    }

    public override bool Contains(Guid documentID) => storage.Contains(documentID);

    /// <summary>
    /// Disposes the underlying storage and drops the cache.
    /// NOTE(review): cache is set to null, so most members would NRE if used after Dispose().
    /// </summary>
    public override void Dispose()
    {
        storage.Dispose();
        cache.Clear();
        cache = null;
    }

    public override bool Refresh(Document document)
    {
        return storage.Refresh(document);
    }

    public override bool IsCaching => true;
}
}

View File

@ -0,0 +1,145 @@
// /**
// * File: FSStorage.cs
// * Author: haraldwolff
// *
// * This file and it's content is copyrighted by the Author and / or copyright holder.
// * Any use wihtout proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.Collections.Generic;
using System.IO;
using System.Diagnostics;
using ln.logging;
using ln.types.odb.ng.storage.cache;
namespace ln.types.odb.ng.storage.fs
{
/// <summary>
/// IStorageContainer backed by a directory on disk: each storage name maps to a
/// SegmentedFileStorage subdirectory, optionally wrapped in a CachingStorage.
/// A ".lock" file containing the owner PID guards the directory; stale locks
/// from dead processes are detected and taken over.
/// </summary>
public class FSStorageContainer : IStorageContainer,IDisposable
{
    public string BasePath { get; }
    // When > 0, every storage returned by GetStorage is wrapped in a CachingStorage with this limit.
    public int DefaultCacheSize { get; set; }

    FileStream lockFile;
    Dictionary<string, IStorage> storages = new Dictionary<string, IStorage>();

    public FSStorageContainer(string basePath)
    {
        BasePath = basePath;
    }

    // Holding the lock file open is what "open" means for this container.
    public bool IsOpen => lockFile != null;
    private void AssertOpen()
    {
        if (!IsOpen)
            throw new IOException("FSStorage not open");
    }

    /// <summary>Closes all managed storages and releases the directory lock file.</summary>
    public void Close()
    {
        lock (this)
        {
            AssertOpen();
            foreach (IStorage storage in storages.Values)
            {
                if (storage.IsOpen)
                    storage.Close();
                storage.Dispose();
            }
            if (lockFile != null)
            {
                lockFile.Close();
                lockFile.Dispose();
                lockFile = null;
            }
        }
    }

    /// <summary>
    /// Returns the storage with the given name, creating and opening it on first use.
    /// </summary>
    public IStorage GetStorage(string storageName)
    {
        lock (this)
        {
            AssertOpen();
            if (!storages.ContainsKey(storageName))
            {
                IStorage storage = new SegmentedFileStorage(Path.Combine(BasePath, storageName));
                if (DefaultCacheSize > 0)
                    storage = new CachingStorage(storage) { MaxCacheSize = DefaultCacheSize, };

                storages.Add(storageName, storage);
            }
            if (!storages[storageName].IsOpen)
                storages[storageName].Open();
            return storages[storageName];
        }
    }

    /// <summary>
    /// Names of the storages instantiated so far.
    /// NOTE(review): only returns storages already touched via GetStorage, not every
    /// subdirectory present on disk - confirm that is the intended contract.
    /// </summary>
    public IEnumerable<string> GetStorageNames()
    {
        lock (this)
        {
            AssertOpen();
            return storages.Keys;
        }
    }

    /// <summary>
    /// Acquires the directory lock (".lock" file with our PID, deleted on close).
    /// If the file exists but its recorded PID no longer belongs to a live process,
    /// the stale lock is taken over; otherwise the original IOException propagates.
    /// </summary>
    public IStorageContainer Open()
    {
        lock (this)
        {
            if (!IsOpen)
            {
                if (!Directory.Exists(BasePath))
                    Directory.CreateDirectory(BasePath);

                try
                {
                    lockFile = new FileStream(Path.Combine(BasePath, ".lock"), FileMode.CreateNew, FileAccess.ReadWrite, FileShare.Read, 1024, FileOptions.DeleteOnClose);
                    lockFile.WriteInteger(Process.GetCurrentProcess().Id);
                    lockFile.Flush();
                } catch (IOException)
                {
                    if (File.Exists(Path.Combine(BasePath, ".lock")))
                    {
                        // Read the owning PID and check whether that process is still alive.
                        lockFile = new FileStream(Path.Combine(BasePath, ".lock"), FileMode.Open, FileAccess.Read, FileShare.Read);
                        int lockPID = lockFile.ReadInteger();
                        lockFile.Close();

                        Process lockProcess = null;
                        try
                        {
                            lockProcess = Process.GetProcessById(lockPID);
                        } catch (Exception)
                        {
                            // GetProcessById throws when no such process exists -> lock is stale
                        }

                        if ((lockProcess != null) && !lockProcess.HasExited)
                            throw;

                        Logging.Log(LogLevel.DEBUG, "FSStorageContainer: Ignoring stale lock file: PID:{0}@{1}", lockPID,Path.Combine(BasePath, ".lock"));

                        lockFile = new FileStream(Path.Combine(BasePath, ".lock"), FileMode.Truncate, FileAccess.ReadWrite, FileShare.Read, 1024, FileOptions.DeleteOnClose);
                        lockFile.WriteInteger(Process.GetCurrentProcess().Id);
                        lockFile.Flush();
                    }
                }
            }
        }
        return this;
    }

    /// <summary>Closes (if open) and forgets all storages.</summary>
    public void Dispose()
    {
        if (IsOpen)
            Close();
        storages.Clear();
    }
}
}

View File

@ -0,0 +1,375 @@
// /**
// * File: FSSTorage.cs
// * Author: haraldwolff
// *
// * This file and it's content is copyrighted by the Author and / or copyright holder.
// * Any use wihtout proper permission is illegal and may lead to legal actions.
// *
// *
// **/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ln.logging;
using ln.types.odb.ng.index;
using ln.types.odb.ng.storage.bases;
using ln.types.btree;
using ln.types.threads;
using ln.objects.catalog;
namespace ln.types.odb.ng.storage
{
/**
* FSStorage
*
* Directory Layout:
*
* <BasePath>/<StorageName>
* /data.odb Serialized Document Data
* /data.idx Serialized Lookup Index for Documents and Free Areas
*
* data.odb
* ----------
* 0000 4 MAGIC Bytes
* 0004 4 Version
* 0008 8 LastCloseTimestamp
* 0010 4 FirstOffset
* 0014 4 GranularWidth
* 0018 8 Reserved 0
*
**/
public class SegmentedFileStorage : StorageBase
{
public String StoragePath { get; }
public String DataFileName => System.IO.Path.Combine(StoragePath, "data.odb");
public override bool IsCaching => false;
public bool AutoFlush { get; set; } = true;
SegmentedFile segmentedFile;
MappingBTree<int, SegmentedFile.Segment> unusedSegments = new MappingBTree<int, SegmentedFile.Segment>((s)=>s.Offset);
MappingBTree<Guid, SegmentedFile.Segment> usedSegments = new MappingBTree<Guid, SegmentedFile.Segment>((s)=>s.ID);
IndexPath.DocumentPath indexRoot = new IndexPath.DocumentPath();
public SegmentedFileStorage(string storagePath)
{
StoragePath = storagePath;
}
private void AssertOpen()
{
if (!IsOpen)
throw new IOException("Not open");
}
public override bool IsOpen => ((segmentedFile != null) && segmentedFile.IsOpen);
/// <summary>
/// Opens the underlying segmented file, partitions its segments into used/unused,
/// resolves duplicate document IDs by keeping the newer segment and freeing the older,
/// and loads (rebuilding if stale) any persisted indices.
/// </summary>
/// <returns><see langword="true"/> if the storage was opened by this call; <see langword="false"/> if already open.</returns>
public override bool Open()
{
    if (!IsOpen)
    {
        try
        {
            if (!Directory.Exists(StoragePath))
                Directory.CreateDirectory(StoragePath);

            segmentedFile = new SegmentedFile(DataFileName);
            segmentedFile.Open();

            foreach (SegmentedFile.Segment segment in segmentedFile.Segments)
            {
                if (Guid.Empty.Equals(segment.ID))
                {
                    unusedSegments.Add(segment);
                }
                else
                {
                    if (usedSegments.TryGet(segment.ID, out SegmentedFile.Segment existing))
                    {
                        // Duplicate document ID: keep the newer segment, free the older one.
                        if (existing.TimeStamp < segment.TimeStamp)
                        {
                            // BUGFIX: remove the tree entry BEFORE clearing the ID - previously
                            // RemoveKey was called with the already-emptied Guid and silently
                            // left the stale entry behind.
                            usedSegments.RemoveKey(existing.ID);
                            existing.ID = Guid.Empty;
                            segmentedFile.Write(existing, new byte[0]);
                            unusedSegments.Add(existing);
                            // BUGFIX: the surviving (newer) segment was never registered.
                            usedSegments.Add(segment);
                        }
                        else
                        {
                            segment.ID = Guid.Empty;
                            segmentedFile.Write(segment, new byte[0]);
                            unusedSegments.Add(segment);
                        }
                    }
                    else
                    {
                        usedSegments.Add(segment);
                    }
                }
            }

            // Restore the index set recorded in indeces.lst; rebuild if any index is stale.
            if (File.Exists(System.IO.Path.Combine(StoragePath, "indeces.lst")))
            {
                bool needsRebuild = false;

                using (FileStream indexLst = new FileStream(System.IO.Path.Combine(StoragePath, "indeces.lst"), FileMode.Open))
                {
                    byte[] indexLstBytes = indexLst.ReadBytes((int)indexLst.Length);
                    ODBList idxList = new ODBList(indexLstBytes, 0, indexLstBytes.Length);

                    foreach (ODBEntity indexName in idxList)
                    {
                        indexRoot.Ensure(IndexPath.SplitPath(indexName.As<String>()));
                    }
                }

                foreach (Index index in indexRoot.GetIndeces())
                {
                    if (!index.LoadIndex(StoragePath, segmentedFile.LastCloseTimestamp))
                        needsRebuild = true;
                }

                if (needsRebuild)
                    RebuildIndeces();
            }

            return true;
        } catch (Exception)
        {
            // Leave the storage in a consistent closed state before propagating.
            segmentedFile?.Close();
            segmentedFile = null;
            usedSegments.Clear();
            unusedSegments.Clear();
            throw;
        }
    }
    return false;
}
public override void Close()
{
lock (this){
AssertOpen();
segmentedFile.Close();
List<String> indexNames = new List<string>();
foreach (Index index in indexRoot.GetIndeces())
{
indexNames.Add(index.IndexName);
index.SaveIndex(StoragePath, segmentedFile.LastCloseTimestamp);
}
ODBList indexList = new ODBList();
indexList.AddRange(indexNames.Select((x) => ODBEntity.FromNative(x)));
FileStream indexLst = new FileStream(System.IO.Path.Combine(StoragePath, "indeces.lst"), FileMode.Create);
indexLst.WriteBytes(indexList.GetStorageBytes());
indexLst.Close();
indexLst.Dispose();
}
}
public void Sync()
{
lock (this)
{
segmentedFile.Sync();
}
}
public override IEnumerable<Guid> GetDocumentIDs()
{
lock (this)
{
return usedSegments.Keys.ToArray();
}
}
public override Document Load(Guid documentID)
{
lock (this)
{
if (!usedSegments.TryGet(documentID,out SegmentedFile.Segment segment))
throw new KeyNotFoundException();
return LoadDocument(segment);
}
}
public override bool Contains(Guid documentID)
{
lock (this)
{
return usedSegments.ContainsKey(documentID);
}
}
private Document LoadDocument(SegmentedFile.Segment segment)
{
byte[] storageBytes = segmentedFile.Read(segment);
try
{
return new Document(storageBytes) { StorageTimeStamp = segment.TimeStamp, };
} catch (Exception e)
{
Logging.Log(LogLevel.DEBUG, "Exception while Deserializing Document from FSStorage: {1} ID={0}",segment.ID,StoragePath);
Logging.Log(LogLevel.DEBUG, "StorageArea: {0}", segment);
Logging.Log(e);
throw;
}
}
public override void Save(Document document)
{
lock (this)
{
byte[] storageBytes = document.GetStorageBytes();
SegmentedFile.Segment segment = PopUnusedSegment(storageBytes.Length);
if (segment == null)
{
segment = segmentedFile.Append(document.ID,storageBytes);
}
else
{
segment.ID = document.ID;
segmentedFile.Write(segment,storageBytes);
}
indexRoot.Replace(document.ID, document);
if (usedSegments.TryGet(document.ID,out SegmentedFile.Segment previousSegment))
{
usedSegments.RemoveKey(document.ID);
previousSegment.ID = Guid.Empty;
segmentedFile.Write(previousSegment,new byte[0]);
PushUnusedSegment(previousSegment);
}
document.StorageTimeStamp = segment.TimeStamp;
usedSegments.Add(segment);
if (AutoFlush)
segmentedFile.Sync();
}
}
public override void Delete(Guid documentID)
{
lock (this)
{
if (usedSegments.TryGet(documentID, out SegmentedFile.Segment segment))
{
usedSegments.RemoveKey(documentID);
segment.ID = Guid.Empty;
segmentedFile.Write(segment, new byte[0]);
indexRoot.Remove(documentID);
PushUnusedSegment(segment);
if (AutoFlush)
segmentedFile.Sync();
}
}
}
public override bool Refresh(Document document)
{
Load(document.ID).CloneTo(document);
return true;
}
private SegmentedFile.Segment PopUnusedSegment(int payloadSize)
{
foreach (SegmentedFile.Segment segment in unusedSegments)
{
if (segment.PayloadSize >= payloadSize)
{
unusedSegments.Remove(segment);
return segment;
}
}
return null;
}
private void PushUnusedSegment(SegmentedFile.Segment segment)
{
unusedSegments.Add(segment);
}
public override DateTime GetStorageTimestamp(Guid documentID)
{
if (usedSegments.ContainsKey(documentID))
return usedSegments[documentID].TimeStamp;
return default(DateTime);
}
public override IEnumerable<Guid> GetDocumentIDs(string path, Predicate<ODBEntity> predicate)
{
lock (this)
{
index.Path p = index.IndexPath.SplitPath(path);
if (indexRoot.Indexed(p))
{
return indexRoot.GetDocumentIDs(p, predicate);
}
else
{
HashSet<Guid> documentIDs = new HashSet<Guid>();
IEnumerable<Guid> ids = GetDocumentIDs();
foreach (Guid documentID in ids)
{
Document document = Load(documentID);
if (predicate(document[path]))
documentIDs.Add(documentID);
}
return documentIDs;
}
}
}
public override void EnsureIndex(params string[] paths)
{
lock (this)
{
bool needsRebuild = false;
foreach (String path in paths)
{
if (indexRoot.Ensure(IndexPath.SplitPath(path)))
needsRebuild = true;
}
if (needsRebuild)
RebuildIndeces();
}
}
public void RebuildIndeces()
{
Logging.Log(LogLevel.INFO, "FSStorage: RebuildIndeces()");
foreach (Guid documentID in GetDocumentIDs())
{
Document document = Load(documentID);
indexRoot.Replace(documentID, document);
}
}
}
}

View File

@ -0,0 +1,84 @@
using System;
using System.Collections.Generic;
using ln.types.odb.values;
using System.Linq;
using ln.types.odb.ng.storage.bases;
using ln.types.threads;
using ln.types.odb.ng.diff;
using ln.types.collections;
namespace ln.types.odb.ng.storage.session
{
/// <summary>
/// Session-scoped view over a backing IStorage. Each loaded document is
/// paired with a pristine clone taken at load time; Save() diffs against that
/// clone and applies only the session's changes to the stored document.
/// </summary>
public class SessionStorage : ChainedStorage
{
    /// <summary>The container that owns this session storage.</summary>
    public SessionStorageContainer SessionContainer { get; }

    // Maps each document instance handed out by Load() to its load-time clone
    // (the diff base for Save). Weak keys let documents be collected once the
    // caller drops them.
    WeakKeyDictionary<Document, Document> cache = new WeakKeyDictionary<Document, Document>();

    public SessionStorage(SessionStorageContainer session, IStorage storage)
        : base(storage)
    {
        SessionContainer = session;
    }

    public override bool IsCaching => true;

    /// <summary>
    /// Loads a document, handing back the same instance that was returned for
    /// this ID earlier in the session.
    /// </summary>
    public override Document Load(Guid documentID)
    {
        lock (this)
        {
            if (!Storage.Contains(documentID))
                throw new KeyNotFoundException();

            Document probe = new Document(documentID);
            if (cache.ContainsKey(probe))
                return cache.GetKeyInstance(probe);  // same instance as before

            Document loaded = Storage.Load(documentID);
            cache.Add(loaded, loaded.Clone() as Document);
            return loaded;
        }
    }

    /// <summary>
    /// Saves a document. For documents previously loaded in this session only
    /// the diff against the load-time state is applied to the stored document,
    /// so concurrent changes to other parts survive.
    /// </summary>
    public override void Save(Document document)
    {
        lock (this)
        {
            if (!cache.ContainsKey(document))
            {
                // first contact with this document in this session: plain save
                Storage.Save(document);
                cache.Add(document, document.Clone() as Document);
                return;
            }

            using (Storage.Lock())
            {
                Document loadState = cache[document];
                Document stored = Storage.Load(document.ID);
                new DocumentDiff(loadState, document).Apply(stored);
                Storage.Save(stored);
                // the just-saved state becomes the new diff base
                cache[document] = document.Clone() as Document;
            }
        }
    }

    public override void Delete(Guid documentID)
    {
        lock (this)
        {
            Storage.Delete(documentID);
            cache.Remove(new Document(documentID));
        }
    }
}
}

View File

@ -0,0 +1,67 @@
using System;
using System.Collections.Generic;
using ln.types.odb.values;
using ln.logging;
using System.Linq;
namespace ln.types.odb.ng.storage.session
{
/// <summary>
/// Session change-synchronisation policy. In this file it is only stored via
/// SessionStorageContainer.SynchronisationStrategy (default REJECTCHANGED);
/// no visible code branches on it yet — member semantics are implied by their
/// names only.
/// </summary>
public enum SessionSynchronisationStrategy
{
    BIDIRECTIONAL,
    FORWARDONLY,
    REJECTCHANGED
}
/// <summary>
/// Storage container decorator that wraps every storage obtained from the
/// underlying container in a SessionStorage, caching the wrappers per name.
/// </summary>
public class SessionStorageContainer : IStorageContainer
{
    /// <summary>The wrapped (backing) container.</summary>
    public IStorageContainer StorageContainer { get; private set; }
    public Mapper ODBMapper { get; private set; }
    public SessionSynchronisationStrategy SynchronisationStrategy { get; set; } = SessionSynchronisationStrategy.REJECTCHANGED;

    public bool IsOpen => StorageContainer.IsOpen;

    public SessionStorageContainer(IStorageContainer storageContainer)
    {
        StorageContainer = storageContainer;
        ODBMapper = Mapper.Default;
    }

    // session wrappers by storage name
    private Dictionary<string, IStorage> storages = new Dictionary<string, IStorage>();

    /// <summary>
    /// Returns the session wrapper for the named storage, creating (and
    /// opening the backing storage of) it on first request.
    /// </summary>
    public IStorage GetStorage(string storageName)
    {
        // single dictionary lookup (was ContainsKey + indexer, three lookups total)
        if (storages.TryGetValue(storageName, out IStorage session))
            return session;

        IStorage storage = StorageContainer.GetStorage(storageName);
        session = new SessionStorage(this, storage);
        storages.Add(storageName, session);

        if (!storage.IsOpen)
            storage.Open();

        return session;
    }

    public IStorageContainer Open()
    {
        StorageContainer.Open();
        return this;
    }

    public void Close()
    {
        StorageContainer.Close();
    }

    public IEnumerable<string> GetStorageNames()
    {
        return StorageContainer.GetStorageNames();
    }

    public void Dispose()
    {
    }
}
}

11
query/Criterion.cs 100644
View File

@ -0,0 +1,11 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ln.objects.query
{
/// <summary>
/// Base class for query criteria: a predicate over a candidate value with
/// respect to a reference value (e.g. equality — see EqualsCriterion).
/// </summary>
public abstract class Criterion
{
    /// <summary>Returns true when <paramref name="value"/> satisfies this criterion against <paramref name="reference"/>.</summary>
    public abstract bool Test(object reference, object value);
}
}

View File

@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ln.objects.query
{
/// <summary>
/// Criterion matching values that are equal to the reference value
/// (null-safe static object.Equals semantics: two nulls count as equal).
/// </summary>
public class EqualsCriterion : Criterion
{
    /// <summary>True when reference and value compare equal.</summary>
    public override bool Test(object reference, object value) => Equals(reference, value);
}
}

View File

@ -0,0 +1,39 @@
using ln.type;
using System;
using System.Collections.Generic;
namespace ln.objects.serialization
{
public delegate bool LookupObjectByReference(object reference, Type objectType, out object o);

/// <summary>
/// Base class for deserializers that turn serialized bytes back into objects,
/// resolving stored object references through the OnLookupObjectByReference
/// handlers.
/// </summary>
public abstract class Deserializer
{
    /// <summary>Raised to resolve a serialized reference back to a live object.</summary>
    public event LookupObjectByReference OnLookupObjectByReference;

    /// <summary>Deserializes serializedBytes; o may carry an existing instance to populate.</summary>
    public abstract bool DeserializeObject(byte[] serializedBytes, ref object o);

    /// <summary>
    /// Asks each registered lookup handler in registration order; the first
    /// one that succeeds wins. Returns false (o = null) when none resolves.
    /// </summary>
    public bool TryLookupObject(object reference, Type targetType, out object o)
    {
        LookupObjectByReference handlers = OnLookupObjectByReference;
        if (handlers != null)
        {
            foreach (LookupObjectByReference handler in handlers.GetInvocationList())
            {
                if (handler(reference, targetType, out o))
                    return true;
            }
        }
        o = null;
        return false;
    }

    /// <summary>
    /// Default type probe: fully deserializes the payload and reports the
    /// resulting object's runtime type. Subclasses may override this with a
    /// cheaper implementation.
    /// </summary>
    public virtual bool TryGetType(byte[] serializedBytes, out Type type)
    {
        object o = null;
        if (DeserializeObject(serializedBytes, ref o) && (o != null))
        {
            type = o.GetType();
            return true;
        }
        type = null;
        return false;
    }
}
}

View File

@ -0,0 +1,40 @@
using ln.objects.catalog;
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
namespace ln.objects.serialization
{
public delegate bool LookupReference(object value, out object reference);
public delegate bool MangleValue(Type expectedType, ref object value);

/// <summary>
/// Base class for serializers. Object graphs are flattened via the
/// OnLookupReference handlers (object -&gt; reference key) and field values may
/// be rewritten before serialization via the OnMangleValue handlers.
/// </summary>
public abstract class Serializer
{
    /// <summary>Raised to map a live object to a serializable reference key.</summary>
    public event LookupReference OnLookupReference;
    /// <summary>Raised to rewrite a field value before it is serialized.</summary>
    public event MangleValue OnMangleValue;

    /// <summary>Serializes o into a byte payload.</summary>
    public abstract bool SerializeObject(object o, out byte[] serializedBytes);

    /// <summary>
    /// Asks each registered lookup handler in registration order; the first
    /// that succeeds wins. Returns false (reference = null) when none matches.
    /// </summary>
    public bool TryLookupReference(object o, out object reference)
    {
        LookupReference handlers = OnLookupReference;
        if (handlers != null)
        {
            foreach (LookupReference handler in handlers.GetInvocationList())
            {
                if (handler(o, out reference))
                    return true;
            }
        }
        reference = null;
        return false;
    }

    /// <summary>
    /// Offers <paramref name="value"/> to every mangle handler (all of them,
    /// in order); returns true when at least one handler changed it.
    /// </summary>
    public bool MangleValue(Type expectedType, ref object value)
    {
        bool mangled = false;
        foreach (MangleValue mangler in OnMangleValue?.GetInvocationList() ?? new MangleValue[0])
        {
            if (mangler(expectedType, ref value))
                mangled = true;
        }
        return mangled;
    }
}
}

View File

@ -0,0 +1,246 @@
using ln.type;
using System;
using System.IO;
using System.Reflection;
using System.Runtime.Serialization;
using System.Text;
namespace ln.objects.serialization.binary
{
/// <summary>
/// Deserializer for the tag-based binary format written by BinarySerializer.
/// Every value starts with a one-byte tag: '0' null, 'P' primitive,
/// 'R' object reference, 'S' structured object, 'E' enum, 'A' array,
/// 'B' byte[], 'G' Guid. Multi-byte reads use the ln.type stream extensions.
/// </summary>
public class BinaryDeserializer : Deserializer
{
    /// <summary>
    /// Deserializes serializedBytes. If an existing instance is passed in
    /// <paramref name="o"/> and the payload is a structured object ('S'),
    /// the stored fields are applied to that instance in place; otherwise a
    /// new value is created and assigned to <paramref name="o"/>.
    /// </summary>
    public override bool DeserializeObject(byte[] serializedBytes, ref object o)
    {
        MemoryStream ms = new MemoryStream(serializedBytes);
        if (!Object.ReferenceEquals(null, o) && (serializedBytes[0] == 'S'))
        {
            ms.ReadByte();  // consume the 'S' tag
            // the stored type name must still be read to advance the stream,
            // but the existing instance's runtime type takes precedence
            String typeName = (string)Deserialize(ms);
            DeserializeStructured(ms, o.GetType(), o);
        }
        else
        {
            o = Deserialize(ms);
        }
        return true;
    }

    /// <summary>Reads only the type of the serialized value, without materializing it fully.</summary>
    public override bool TryGetType(byte[] serializedBytes, out Type type) => TryGetType(new MemoryStream(serializedBytes), out type);

    // Decodes the leading tag byte into a CLR type. For 'R'/'S'/'E'/'A' the
    // type name follows the tag as a serialized string.
    public bool TryGetType(Stream stream, out Type type)
    {
        int tc;
        switch (tc = stream.ReadByte())
        {
            case -1:
                throw new EndOfStreamException();
            case '0':
                // serialized null carries no type information
                type = null;
                break;
            case 'P':
                return TryGetPrimitiveType(stream, out type);
            case 'R':
            case 'S':
            case 'E':
                type = Type.GetType(Deserialize(stream) as string);
                break;
            case 'A':
                // element type name is stored; re-create the array type from it
                type = Type.GetType(Deserialize(stream) as string).MakeArrayType();
                break;
            case 'B':
                type = typeof(byte[]);
                break;
            case 'G':
                type = typeof(Guid);
                break;
            default:
                type = null;
                return false;
        }
        return true;
    }

    // Maps the second-level primitive type-code byte (written by
    // BinarySerializer.SerializePrimitive) to the corresponding CLR type.
    public bool TryGetPrimitiveType(Stream stream, out Type type)
    {
        int tc;
        switch (tc = stream.ReadByte())
        {
            case -1:
                throw new EndOfStreamException();
            case 'I':
                type = typeof(int);
                break;
            case 'i':
                type = typeof(uint);
                break;
            case 'B':
                type = typeof(byte);
                break;
            case 'C':
                type = typeof(char);
                break;
            case 'S':
                type = typeof(short);
                break;
            case 's':
                type = typeof(ushort);
                break;
            case 'L':
                type = typeof(long);
                break;
            case 'l':
                type = typeof(ulong);
                break;
            case 'F':
                type = typeof(float);
                break;
            case 'D':
                type = typeof(double);
                break;
            case 'b':
                type = typeof(bool);
                break;
            case 'T':
                type = typeof(string);
                break;
            default:
                type = null;
                return false;
        }
        return true;
    }

    // Dispatches on the leading tag byte to the specific deserialization routine.
    object Deserialize(Stream stream)
    {
        int tc;
        switch (tc = stream.ReadByte())
        {
            case -1:
                throw new EndOfStreamException();
            case '0':
                return null;
            case 'P':
                return DeserializePrimitive(stream);
            case 'R':
                return DeserializeReference(stream);
            case 'S':
                return DeserializeStructured(stream);
            case 'E':
                return DeserializeEnum(stream);
            case 'A':
                return DeserializeArray(stream);
            case 'B':
                return DeserializeByteArray(stream);
            case 'G':
                return DeserializeGuid(stream);
            default:
                throw new NotSupportedException(String.Format("Unsupported type code: {0}", (char)tc));
        }
    }

    // Reads a primitive: one type-code byte followed by the raw value.
    object DeserializePrimitive(Stream stream)
    {
        int tc;
        switch (tc = stream.ReadByte())
        {
            case -1:
                throw new EndOfStreamException();
            case 'I':
                return stream.ReadInteger();
            case 'i':
                return stream.ReadUInteger();
            case 'B':
                return (byte)stream.ReadByte();
            case 'C':
                // chars are stored as their 16-bit value
                return (char)stream.ReadShort();
            case 'S':
                return stream.ReadShort();
            case 's':
                return stream.ReadUShort();
            case 'L':
                return stream.ReadLong();
            case 'l':
                return stream.ReadULong();
            case 'F':
                return stream.ReadFloat();
            case 'D':
                return stream.ReadDouble();
            case 'b':
                return stream.ReadByte() != 0;
            case 'T':
                // strings: int32 byte length + UTF-8 bytes
                int tl = stream.ReadInteger();
                byte[] tbytes = stream.ReadBytes(tl);
                return Encoding.UTF8.GetString(tbytes);
            default:
                throw new NotSupportedException(String.Format("Unsupported primitive type code: {0}", (char)tc));
        }
    }

    // Guids are stored as their 16 raw bytes.
    object DeserializeGuid(Stream stream) => new Guid(stream.ReadBytes(16));

    // Enum: type name + either the numeric value ([Flags] enums) or the member name.
    object DeserializeEnum(Stream stream)
    {
        Type eType = Type.GetType(Deserialize(stream) as string);
        object value = Deserialize(stream);
        if (value is int nvalue)
            return Enum.ToObject(eType, nvalue);
        else if (value is string svalue)
            return Enum.Parse(eType, svalue);
        else
            throw new NotSupportedException("Unsupported ENUM repressentation found");
    }

    // 'B': int32 length + raw bytes.
    object DeserializeByteArray(Stream stream)
    {
        int length = stream.ReadInteger();
        return stream.ReadBytes(length);
    }

    // 'A': element type name + length + recursively serialized elements.
    object DeserializeArray(Stream stream)
    {
        Type etype = Type.GetType(Deserialize(stream) as string);
        int length = stream.ReadInteger();
        Array array = Array.CreateInstance(etype, length);
        for (int n = 0; n < array.Length; n++)
            array.SetValue(Deserialize(stream), n);
        return array;
    }

    // 'S': type name + field count + (field name, value) pairs. Creates the
    // instance first (non-public constructors are allowed for reference
    // types), then fills its fields in place.
    object DeserializeStructured(Stream stream)
    {
        String typeName = (string)Deserialize(stream);
        Type type = Type.GetType(typeName);
        object o = type.IsValueType ? Activator.CreateInstance(type) : Activator.CreateInstance(type, true);
        DeserializeStructured(stream, type, o);
        return o;
    }

    // Reads the stored (field name, value) pairs and assigns them to the
    // matching public or non-public instance fields of o.
    void DeserializeStructured(Stream stream, Type type, object o)
    {
        int nFields = stream.ReadInteger();
        for (int n = 0; n < nFields; n++)
        {
            FieldInfo fieldInfo = type.GetField(Deserialize(stream) as string, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic);
            fieldInfo.SetValue(o, Deserialize(stream));
        }
    }

    // 'R': type name + serialized reference key; resolved through the
    // OnLookupObjectByReference handlers registered on the base class.
    object DeserializeReference(Stream stream)
    {
        Type type = Type.GetType(Deserialize(stream) as string);
        object reference = Deserialize(stream);
        if (TryLookupObject(reference, type, out object o))
            return o;
        throw new SerializationException(String.Format("Could not lookup reference {0}", reference));
    }
}
}

View File

@ -0,0 +1,164 @@
using ln.type;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
namespace ln.objects.serialization.binary
{
/// <summary>
/// Serializer for a compact tag-based binary format (see BinaryDeserializer
/// for the tag set). Nested reference-type objects are replaced by an 'R'
/// reference record when the base class can look up a reference for them;
/// the root object is always serialized inline.
/// </summary>
public class BinarySerializer : Serializer
{
    /// <summary>
    /// Serializes the root object. Unlike nested values, the root is never
    /// replaced by a reference record.
    /// </summary>
    public override bool SerializeObject(object o, out byte[] serializedBytes)
    {
        MemoryStream serializedStream = new MemoryStream();
        if (Object.ReferenceEquals(null, o))
        {
            // a null root is encoded as the single '0' tag, matching what
            // Serialize() writes for nested nulls (previously this path threw
            // a NullReferenceException on o.GetType())
            serializedStream.WriteByte('0');
        }
        else
        {
            Type type = o.GetType();
            if (type.IsPrimitive || (type == typeof(string)))
                SerializePrimitive(serializedStream, o);
            else if (type.IsEnum)
                SerializeEnum(serializedStream, o);
            else if (type.IsValueType)
                SerializeValue(serializedStream, o);
            else if (type.IsArray)
                SerializeArray(serializedStream, o);
            else
                SerializeStructured(serializedStream, o);
        }
        serializedBytes = serializedStream.ToArray();
        return true;
    }

    // Serializes a nested value; reference-type objects become an 'R' record
    // when a reference key can be looked up for them.
    void Serialize(Stream stream, object o)
    {
        if (Object.ReferenceEquals(null, o))
        {
            stream.WriteByte('0');
        }
        else
        {
            Type type = o.GetType();
            if (type.IsPrimitive || (type == typeof(string)))
                SerializePrimitive(stream, o);
            else if (type.IsEnum)
                SerializeEnum(stream, o);
            else if (type.IsValueType)
                SerializeValue(stream, o);
            else if (type.IsArray)
                SerializeArray(stream, o);
            else
            {
                if (TryLookupReference(o, out object reference))
                {
                    // 'R' + type name + serialized reference key
                    stream.WriteByte('R');
                    SerializePrimitive(stream, type.GetSimpleQualifiedName());
                    Serialize(stream, reference);
                }
                else
                {
                    SerializeStructured(stream, o);
                }
            }
        }
    }

    // 'P' + one type-code byte + raw value (strings: int32 length + UTF-8).
    void SerializePrimitive(Stream stream, object value)
    {
        stream.WriteByte('P');
        if (value is int i) { stream.WriteByte('I'); stream.WriteInteger(i); }
        else if (value is uint ui) { stream.WriteByte('i'); stream.WriteUInteger(ui); }
        else if (value is byte b) { stream.WriteByte('B'); stream.WriteByte(b); }
        else if (value is char ch) { stream.WriteByte('C'); stream.WriteShort((short)ch); }
        else if (value is short sh) { stream.WriteByte('S'); stream.WriteShort(sh); }
        else if (value is ushort us) { stream.WriteByte('s'); stream.WriteUShort(us); }
        else if (value is long il) { stream.WriteByte('L'); stream.WriteLong(il); }
        else if (value is ulong ul) { stream.WriteByte('l'); stream.WriteULong(ul); }
        else if (value is float f) { stream.WriteByte('F'); stream.WriteFloat(f); }
        else if (value is double d) { stream.WriteByte('D'); stream.WriteDouble(d); }
        else if (value is bool bo) { stream.WriteByte('b'); stream.WriteByte(bo ? (byte)0x01 : (byte)0x00); }
        else if (value is string str)
        {
            stream.WriteByte('T');
            byte[] bytes = Encoding.UTF8.GetBytes(str);
            stream.WriteInteger(bytes.Length);
            stream.WriteBytes(bytes);
        }
        else
            throw new NotSupportedException(String.Format("Unsupported primitive type: {0}", value.GetType().Name));
        // ToDo: Add struct System.Decimal
    }

    // 'E' + type name + payload: the numeric value for [Flags] enums,
    // otherwise the member name (matches BinaryDeserializer.DeserializeEnum).
    void SerializeEnum(Stream stream, object value)
    {
        Type eType = value.GetType();
        stream.WriteByte('E');
        SerializePrimitive(stream, eType.GetSimpleQualifiedName());
        if (eType.GetCustomAttribute<FlagsAttribute>() != null)
            // BUGFIX: (int)value only unboxes enums whose underlying type is
            // int and throws InvalidCastException otherwise; Convert.ToInt32
            // also handles byte/sbyte/short/ushort/uint-backed flags enums and
            // still yields the int the deserializer expects.
            SerializePrimitive(stream, Convert.ToInt32(value));
        else
            SerializePrimitive(stream, value.ToString());
    }

    // byte[] gets the compact 'B' form (length + raw bytes); any other array
    // is written as 'A' + element type name + length + serialized elements.
    void SerializeArray(Stream stream, object value)
    {
        if (value is byte[] ba)
        {
            stream.WriteByte('B');
            stream.WriteInteger(ba.Length);
            stream.WriteBytes(ba);
        }
        else
        {
            Array array = (Array)value;
            stream.WriteByte('A');
            SerializePrimitive(stream, array.GetType().GetElementType().GetSimpleQualifiedName());
            stream.WriteInteger(array.Length);
            for (int n = 0; n < array.Length; n++)
                Serialize(stream, array.GetValue(n));
        }
    }

    // Guids get their own 'G' tag (16 raw bytes); every other value type is
    // written like a structured object.
    void SerializeValue(Stream stream, object value)
    {
        if (value is Guid guid)
        {
            stream.WriteByte('G');
            stream.WriteBytes(guid.ToByteArray());
        }
        else
            SerializeStructured(stream, value);
    }

    // 'S' + type name + field count + (field name, value) pairs over all
    // public and non-public instance fields.
    void SerializeStructured(Stream stream, object value)
    {
        Type type = value.GetType();
        FieldInfo[] fields = type.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public);

        stream.WriteByte('S');
        SerializePrimitive(stream, type.GetSimpleQualifiedName());
        stream.WriteInteger(fields.Length);

        foreach (FieldInfo fieldInfo in fields)
        {
            object v = fieldInfo.GetValue(value);
            // NOTE(review): a mangled value is written back into the source
            // object itself — confirm this side effect is intended.
            if (MangleValue(fieldInfo.FieldType, ref v))
                fieldInfo.SetValue(value, v);
            SerializePrimitive(stream, fieldInfo.Name);
            Serialize(stream, v);
        }
    }
}
}

View File

@ -0,0 +1,75 @@
using ln.json;
using ln.json.mapping;
using ln.logging;
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
namespace ln.objects.serialization.json
{
/// <summary>
/// Deserializer that maps JSON (via ln.json's JSONMapper) back to objects.
/// Object references are stored as base64-encoded Guids and resolved through
/// the base class's lookup handlers. DeserializeObject itself is not yet
/// implemented (see the commented draft kept below).
/// </summary>
public class JSONDeserializer : Deserializer
{
    // only used by the commented-out DeserializeObject draft below
    ObjectStore ObjectStore;
    public JSONMapper Mapper { get; }

    public JSONDeserializer(ObjectStore objectStore)
    {
        ObjectStore = objectStore;
        // map all public + non-public instance FIELDS, mirroring JSONSerializer
        Mapper = new JSONMapper() { DefaultBindingFlags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, DefaultMappingFlags = JSONObjectMappingFlags.FIELDS };
        Mapper.OnRequestCustomUnserialization += Mapper_OnRequestCustomUnserialization;
        // IList<T> fields are materialized as LazyList<T> bound to the object
        // store; the factory returns false — NOTE(review): presumably this
        // controls whether the mapping is cached, confirm against JSONMapper.
        Mapper.AddMappingFactory(
            typeof(IList<>),
            (Type targetType, out JSONMapping mapping) =>
            {
                mapping = (JSONMapping)Activator.CreateInstance(typeof(LazyListMapping<>).MakeGenericType(targetType.GetGenericArguments()), objectStore);
                return false;
            });
    }

    // For non-value, non-interface types that map to a JSONObjectMapping, the
    // JSON value is a base64 string holding the referenced object's Guid;
    // resolve it through the registered lookup handlers.
    private bool Mapper_OnRequestCustomUnserialization(JSONValue json, Type targetType, out object o)
    {
        if (!(json is JSONNull) && !targetType.IsValueType && !targetType.IsInterface && Mapper.GetOrBuildMapping(targetType, out JSONMapping mapping) && mapping is JSONObjectMapping)
        {
            Guid uid = new Guid(Convert.FromBase64String((json as JSONString).Value));
            return TryLookupObject(uid, targetType, out o);
        }
        o = null;
        return false;
    }

    /// <summary>
    /// Not implemented yet; the commented code below is the intended
    /// implementation (it needs a target type, which the current signature
    /// does not provide).
    /// </summary>
    public override bool DeserializeObject(byte[] serializedBytes, ref object o)
    {
        throw new NotImplementedException();
        /*
        if (ObjectStore.DEBUG)
        Logging.Log(LogLevel.DEBUG, "Deserializing: {0}", Encoding.UTF8.GetString(serializedBytes));
        if (serializedBytes.Length == 0)
        {
        o = null;
        return true;
        }
        if (Mapper.GetOrBuildMapping(targetType, out JSONMapping mapping))
        {
        JSONValue json = JSONParser.Parse(Encoding.UTF8.GetString(serializedBytes));
        if (mapping is JSONObjectMapping objectMapping)
        {
        objectMapping.Apply(Mapper, json as JSONObject, o);
        }
        else
        {
        o = mapping.FromJson(Mapper, json);
        }
        return true;
        }
        throw new NotSupportedException();
        */
    }
}
}

View File

@ -0,0 +1,67 @@
using ln.json;
using ln.json.mapping;
using ln.logging;
using System;
using System.Text;
using System.Reflection;
using System.Collections.Generic;
namespace ln.objects.serialization.json
{
/// <summary>
/// Serializer that maps objects to UTF-8 JSON via ln.json's JSONMapper.
/// Objects for which a reference can be looked up are emitted as a base64
/// string holding the referenced Guid instead of being serialized inline.
/// </summary>
public class JSONSerializer : Serializer
{
    public JSONMapper Mapper { get; }
    public ObjectStore ObjectStore { get; }

    public JSONSerializer(ObjectStore objectStore)
    {
        ObjectStore = objectStore;
        // map all public + non-public instance FIELDS, mirroring JSONDeserializer
        Mapper = new JSONMapper() { DefaultBindingFlags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, DefaultMappingFlags = JSONObjectMappingFlags.FIELDS };
        Mapper.OnRequestCustomSerialization += Mapper_OnRequestCustomSerialization;
        // IList<T> fields go through the store-bound LazyListMapping<T>
        Mapper.AddMappingFactory(
            typeof(IList<>),
            (Type listType, out JSONMapping mapping) =>
            {
                mapping = (JSONMapping)Activator.CreateInstance(typeof(LazyListMapping<>).MakeGenericType(listType.GetGenericArguments()), objectStore);
                return false;
            });
    }

    // Replaces an object by its reference (a base64-encoded Guid string) when
    // it is a non-value, non-interface type with an object mapping and the
    // base class can look up a reference for it.
    private bool Mapper_OnRequestCustomSerialization(object o, out JSONValue json)
    {
        json = null;
        if (ReferenceEquals(null, o))
            return false;

        Type type = o.GetType();
        if (type.IsValueType || type.IsInterface)
            return false;
        if (!Mapper.GetOrBuildMapping(type, out JSONMapping mapping) || !(mapping is JSONObjectMapping))
            return false;
        if (!TryLookupReference(o, out object reference))
            return false;

        json = new JSONString(Convert.ToBase64String(((Guid)reference).ToByteArray()));
        return true;
    }

    /// <summary>Serializes o to UTF-8 JSON bytes; null becomes an empty payload.</summary>
    public override bool SerializeObject(object o, out byte[] serializedBytes)
    {
        if (ReferenceEquals(null, o))
        {
            serializedBytes = new byte[0];
            return true;
        }

        if (!Mapper.GetOrBuildMapping(o.GetType(), out JSONMapping mapping))
            throw new NotSupportedException();

        JSONValue json = mapping.ToJson(Mapper, o);
        serializedBytes = Encoding.UTF8.GetBytes(json.ToString());
        if (ObjectStore.DEBUG)
            Logging.Log(LogLevel.DEBUG, "Serialized: {0}", json.ToString());
        return true;
    }
}
}

View File

@ -0,0 +1,51 @@
using ln.json;
using ln.json.mapping;
using ln.objects.collections;
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Text;
namespace ln.objects.serialization.json
{
/// <summary>
/// JSON mapping for IList&lt;T&gt; fields: serializes elements through the
/// mapper (so references apply) and deserializes into a store-backed
/// LazyList&lt;T&gt; that resolves elements lazily.
/// </summary>
public class LazyListMapping<T> : JSONMapping where T : class
{
    ObjectStore ObjectStore;   // store the LazyList resolves its elements from
    Type ElementType;

    public LazyListMapping(ObjectStore objectStore)
        // NOTE(review): registered under the open generic IList<> — confirm
        // JSONMapping expects the open type here rather than IList<T>.
        : base(typeof(IList<>))
    {
        ObjectStore = objectStore;
        ElementType = typeof(T);
    }

    /// <summary>Serializes the list as a JSON array of mapper-serialized elements.</summary>
    public override JSONValue ToJson(JSONMapper mapper, object value)
    {
        if (!(value is IList<T> ilist))
            // BUGFIX: ArgumentException's first parameter is the message — the
            // old code passed nameof(value) as the message text.
            throw new ArgumentException("value must implement IList<T>", nameof(value));
        JSONArray jsonArray = new JSONArray();
        foreach (T item in ilist)
            if (mapper.Serialize(item, out JSONValue jsonItem))
                jsonArray.Add(jsonItem);
            else
                throw new SerializationException();
        return jsonArray;
    }

    /// <summary>Deserializes a JSON array into a LazyList&lt;T&gt; bound to the object store.</summary>
    public override object FromJson(JSONMapper mapper, JSONValue json)
    {
        IList<T> ilist = new LazyList<T>(ObjectStore);
        JSONArray jsonArray = json as JSONArray;
        foreach (JSONValue item in jsonArray.Children)
            ilist.Add((T)mapper.FromJson(item, ElementType));
        return ilist;
    }
}
}

View File

@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using System.Dynamic;
using System.Text;
namespace ln.objects.storage
{
/// <summary>
/// Bookkeeping record for one object slot inside a BinaryObjectFile:
/// its identity (UID/Version) plus its physical location within the file.
/// </summary>
public class BinaryObject
{
    /// <summary>Object identity; Guid.Empty marks a free (unused) slot.</summary>
    public Guid UID { get; set; }
    /// <summary>Version number of the stored object.</summary>
    public int Version { get; set; }
    /// <summary>Byte offset of this slot within the file.</summary>
    public long Offset { get; set; }
    /// <summary>Total slot size on disk, header included.</summary>
    public int PhySize { get; set; }
    /// <summary>Number of payload bytes actually in use.</summary>
    public int LogSize { get; set; }

    public BinaryObject() { }

    public BinaryObject(long offset, int phySize)
    {
        Offset = offset;
        PhySize = phySize;
    }
}
}

View File

@ -0,0 +1,82 @@
using ln.collections;
using System;
using System.Collections.Generic;
using System.Text;
namespace ln.objects.storage
{
/// <summary>
/// Two-level in-memory index over the slots of a BinaryObjectFile:
/// UID -&gt; (Version -&gt; BinaryObject). The second level is ordered by
/// version, so "latest" is the last value of the subtree.
/// </summary>
class BinaryObjectCache
{
    BTree<Guid, BTree<int, BinaryObject>> firstLevel = new BTree<Guid, BTree<int, BinaryObject>>();

    public BinaryObjectCache() { }

    // Returns the version subtree for uid, creating an empty one on demand
    // (note: this mutates firstLevel even on read paths).
    BTree<int, BinaryObject> GetSecondLevel(Guid uid)
    {
        if (!firstLevel.TryGet(uid, out BTree<int, BinaryObject> secondLevel))
        {
            secondLevel = new BTree<int, BinaryObject>();
            firstLevel.Add(uid, secondLevel);
        }
        return secondLevel;
    }

    // Non-creating lookup: true only when a subtree exists AND holds versions.
    bool TryGetSecondLevel(Guid uid, out BTree<int, BinaryObject> secondLevel) => firstLevel.TryGet(uid, out secondLevel) && !secondLevel.Empty;

    public void Add(BinaryObject binaryObject)
    {
        BTree<int, BinaryObject> secondLevel = GetSecondLevel(binaryObject.UID);
        secondLevel.Add(binaryObject.Version, binaryObject);
    }

    public void Remove(BinaryObject binaryObject)
    {
        BTree<int, BinaryObject> secondLevel = GetSecondLevel(binaryObject.UID);
        secondLevel.Remove(binaryObject.Version);
    }

    /// <summary>True when at least one version of the object is cached.</summary>
    // BUGFIX: the old code did GetSecondLevel(uid).ContainsKey(uid) — probing
    // the int-keyed version tree with a Guid (and creating an empty subtree as
    // a side effect). Use the non-creating, non-empty check instead.
    public bool Contains(Guid uid) => TryGetSecondLevel(uid, out _);

    public IEnumerable<Guid> UIDs => firstLevel.Keys;

    /// <summary>All cached versions (slots) of the given object, ordered by version.</summary>
    public IEnumerable<BinaryObject> GetBinaryObjects(Guid uid) => GetSecondLevel(uid).Values;

    /// <summary>The highest-version slot of the given object.</summary>
    public BinaryObject GetLatestBinaryObject(Guid uid)
    {
        BTree<int, BinaryObject> secondLevel = GetSecondLevel(uid);
        return secondLevel.LastValue();
    }

    public BinaryObject GetBinaryObject(Guid uid, int version) => GetSecondLevel(uid)[version];

    public bool TryGetBinaryObjects(Guid uid, out IEnumerable<BinaryObject> binaryObjects)
    {
        if (!TryGetSecondLevel(uid, out BTree<int, BinaryObject> secondLevel))
        {
            binaryObjects = null;
            return false;
        }
        binaryObjects = secondLevel.Values;
        return true;
    }

    /// <summary>Looks up a specific version; -1 requests the latest one.</summary>
    public bool TryGetBinaryObject(Guid uid, int version, out BinaryObject binaryObject)
    {
        if (TryGetSecondLevel(uid, out BTree<int, BinaryObject> secondLevel))
        {
            if (version == -1)
            {
                if (secondLevel.TryGetLastValue(out binaryObject))
                    return true;
            }
            else
            {
                if (secondLevel.TryGet(version, out binaryObject))
                    return true;
            }
        }
        binaryObject = null;
        return false;
    }

    public bool TryGetLatestBinaryObject(Guid uid, out BinaryObject binaryObject) => GetSecondLevel(uid).TryGetLastValue(out binaryObject);
    public bool TryGetFirstBinaryObject(Guid uid, out BinaryObject binaryObject) => GetSecondLevel(uid).TryGetFirstValue(out binaryObject);
}
}

View File

@ -0,0 +1,296 @@
using ln.collections;
using ln.type;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
/*
* BinaryObjectFile Format
*
* Offset Type Description
* 0x0000 byte[4] Magic Number "BOF0"
* 0x0004 int32 FeatureFlags
* 0x0008 long offset of first binary object
*
* BinaryObject
* Guid UID UID of this object
* int32 Version Version of this object
* int32 phySize size of this binary object
* int32 logSize size of used payload
* int32 reserved Reserved 0
* byte[phySize] object data
*
*/
namespace ln.objects.storage
{
/// <summary>
/// Feature flags stored in the BinaryObjectFile header at offset 0x0004
/// (see the format description above). No optional features are defined yet.
/// </summary>
public enum BOFFeatures : int
{
    NONE = 0
}
public class BinaryObjectFile : IDisposable
{
public readonly byte[] MagicNumber = Encoding.ASCII.GetBytes("BOF0");
public string FileName { get; }
public BOFFeatures Features { get; private set; } = BOFFeatures.NONE;
int granularity = 12;
/// <summary>
/// log2 of the allocation granule: slot sizes are rounded up to multiples
/// of (1 &lt;&lt; Granularity). Minimum 6 (64 bytes); defaults to 12 (4096 bytes).
/// </summary>
public int Granularity
{
    get => granularity;
    set
    {
        // BUGFIX: validate the incoming value — the old check tested the
        // current field ("granularity < 6"), so with the default of 12 any
        // new value, even one below 6, was silently accepted.
        if (value < 6)
            throw new ArgumentOutOfRangeException(nameof(value), "Granularity must be >= 6");
        granularity = value;
    }
}
public int GranularSize => (1 << granularity);
public int GranularMask => (1 << granularity) - 1;
BinaryObjectCache binaryObjects;
BTree<long, BinaryObject> unusedBinaryObjects;
FileStream storageFile;
long appendOffset;
/// <summary>Creates a handle for the given file path; call Open() before any I/O.</summary>
public BinaryObjectFile(string filename)
{
    FileName = filename;
}
/// <summary>Opens the storage file, creating a fresh one when it does not exist yet.</summary>
public void Open()
{
    if (File.Exists(FileName))
        OpenStorage();
    else
        Create();
}
// Creates a brand-new storage file and writes the file header:
// magic "BOF0", feature flags, and the offset of the first binary object
// (see the format description above).
private void Create()
{
    storageFile = new FileStream(FileName, FileMode.CreateNew, FileAccess.ReadWrite);
    storageFile.WriteBytes(MagicNumber);
    storageFile.WriteInteger((int)Features);
    storageFile.WriteLong(4096);    // first binary object starts at offset 4096
    storageFile.Flush();
    binaryObjects = new BinaryObjectCache();
    unusedBinaryObjects = new BTree<long, BinaryObject>();
    appendOffset = 4096;            // next append position == first object offset
}
// Opens an existing storage file: validates the magic number, reads the
// feature flags and the first-object offset, then walks all slot headers,
// sorting each slot into the used cache or (UID == Guid.Empty) the free tree.
private void OpenStorage()
{
    storageFile = new FileStream(FileName, FileMode.Open, FileAccess.ReadWrite);
    binaryObjects = new BinaryObjectCache();
    unusedBinaryObjects = new BTree<long, BinaryObject>();
    if (!storageFile.ReadBytes(MagicNumber.Length).AreEqual(MagicNumber))
        throw new FormatException("Magic Number does not match");
    Features = (BOFFeatures)storageFile.ReadInteger();
    long offset = storageFile.ReadLong();   // offset of the first binary object
    while (offset < storageFile.Length)
    {
        BinaryObject binaryObject = ReadBinaryObject(offset);
        if (Guid.Empty.Equals(binaryObject.UID))
            unusedBinaryObjects.Add(binaryObject.Offset, binaryObject);
        else
            binaryObjects.Add(binaryObject);
        // slots are laid out back to back; PhySize includes the header
        offset += binaryObject.PhySize;
    }
    appendOffset = offset;  // end of the last slot == next append position
}
// Reads one slot header at the given offset. Header layout (32 bytes total):
// 16 bytes UID, 4 Version, 4 PhySize, 4 LogSize, 4 reserved.
private BinaryObject ReadBinaryObject(long offset)
{
    storageFile.Position = offset;
    BinaryObject binaryObject = new BinaryObject();
    binaryObject.UID = new Guid(storageFile.ReadBytes(16));
    binaryObject.Version = storageFile.ReadInteger();
    binaryObject.Offset = offset;
    binaryObject.PhySize = storageFile.ReadInteger();
    binaryObject.LogSize = storageFile.ReadInteger();
    storageFile.ReadInteger();  // reserved field, skipped
    return binaryObject;
}
// Writes the 32-byte slot header (UID, Version, PhySize, LogSize, reserved)
// at the slot's offset, leaving the stream positioned at the payload start.
private void WriteBinaryObject(BinaryObject binaryObject)
{
    storageFile.Position = binaryObject.Offset;
    storageFile.WriteBytes(binaryObject.UID.ToByteArray());
    storageFile.WriteInteger(binaryObject.Version);
    storageFile.WriteInteger(binaryObject.PhySize);
    storageFile.WriteInteger(binaryObject.LogSize);
    storageFile.WriteInteger(0);    // reserved
}
// Writes header + payload for the slot. PhySize must leave room for the
// 32-byte header in front of the payload.
private void WriteBinaryObject(BinaryObject binaryObject, byte[] data)
{
    if (data.Length > (binaryObject.PhySize - 32))
        // BUGFIX: ArgumentException takes (message, paramName) — the two
        // arguments were swapped, making "data" the exception message.
        throw new ArgumentException("more physical space needed for data", nameof(data));
    binaryObject.LogSize = data.Length;
    WriteBinaryObject(binaryObject);
    storageFile.WriteBytes(data);
}
/// <summary>UIDs of all live (non-free) objects in the file.</summary>
public IEnumerable<Guid> ObjectUIDs => binaryObjects.UIDs;
/// <summary>All stored version numbers for the given object UID.</summary>
public IEnumerable<int> GetObjectVersions(Guid uid) => binaryObjects.GetBinaryObjects(uid).Select(bo => bo.Version);
/// <summary>True when at least one version of the object is stored.</summary>
public bool Contains(Guid uid) => binaryObjects.Contains(uid);
/// <summary>Reads the payload of the latest version (-1) of the object.</summary>
public byte[] ReadBinaryObject(Guid uid) => ReadBinaryObject(uid, -1);
/// <summary>
/// Reads the payload bytes of the given object version (-1 selects the
/// latest version).
/// </summary>
public byte[] ReadBinaryObject(Guid uid, int version)
{
    lock (this)
    {
        BinaryObject binaryObject = (version == -1) ? binaryObjects.GetLatestBinaryObject(uid) : binaryObjects.GetBinaryObject(uid, version);
        storageFile.Position = binaryObject.Offset + 32;    // skip the 32-byte slot header
        return storageFile.ReadBytes(binaryObject.LogSize);
    }
}
/// <summary>Reads the latest version's payload; false when the object is unknown.</summary>
public bool TryReadBinaryObject(Guid uid, out byte[] serializedBytes) => TryReadBinaryObject(uid, -1, out serializedBytes);

/// <summary>Reads the payload of a specific version (-1 = latest); false when not found.</summary>
public bool TryReadBinaryObject(Guid uid, int version, out byte[] serializedBytes)
{
    serializedBytes = null;
    lock (this)
    {
        if (!binaryObjects.TryGetBinaryObject(uid, version, out BinaryObject slot))
            return false;
        storageFile.Position = slot.Offset + 32;    // skip the 32-byte slot header
        serializedBytes = storageFile.ReadBytes(slot.LogSize);
        return true;
    }
}
public void WriteBinaryObject(Guid uid, byte[] data)
{
lock (this)
{
if (!FindUnusedBinaryObject(data.Length + 32, out BinaryObject binaryObject))
if (!AppendBinaryObject(data.Length + 32, out binaryObject))
throw new OutOfMemoryException("could not append to storage");
if (binaryObjects.TryGetLatestBinaryObject(uid, out BinaryObject latestObject))
binaryObject.Version = latestObject.Version + 1;
binaryObject.UID = uid;
WriteBinaryObject(binaryObject, data);
binaryObjects.Add(binaryObject);
}
}
        /// <summary>
        /// Removes a single version of an object and returns its slot to the free list.
        /// </summary>
        /// <param name="uid">unique id of the object</param>
        /// <param name="version">version to remove</param>
        public void RemoveBinaryObject(Guid uid, int version)
        {
            lock (this)
            {
                BinaryObject binaryObject = binaryObjects.GetBinaryObject(uid, version);
                binaryObjects.Remove(binaryObject);
                // Push may coalesce the slot with free neighbors and, via ref, replace
                // binaryObject with the merged region — the header written below must be
                // the merged one, so the statement order here is load-bearing.
                PushUnusedBinaryObject(ref binaryObject);
                WriteBinaryObject(binaryObject);
            }
        }
public void RemoveBinaryObjects(Guid uid)
{
lock (this)
{
foreach (int version in GetObjectVersions(uid))
{
RemoveBinaryObject(uid, version);
}
}
}
        /// <summary>
        /// First-fit search of the free list for a slot of at least minPhySize bytes
        /// (rounded up to the allocation granularity). When the found slot is at least one
        /// granule larger than needed, it is split and the remainder is kept on the free list.
        /// </summary>
        /// <param name="minPhySize">required physical size including the 32 byte header</param>
        /// <param name="binaryObject">receives the reserved slot on success, null otherwise</param>
        /// <returns>true when a suitable free slot was found</returns>
        private bool FindUnusedBinaryObject(int minPhySize,out BinaryObject binaryObject)
        {
            // round up to the next granule boundary
            minPhySize = (minPhySize + GranularMask) & ~GranularMask;
            foreach (BinaryObject candidate in unusedBinaryObjects.Values)
            {
                if (candidate.PhySize >= minPhySize)
                {
                    // NOTE(review): the collection is modified while its enumerator is live;
                    // this is only safe because we return immediately afterwards (no further
                    // MoveNext) — confirm unusedBinaryObjects tolerates this.
                    unusedBinaryObjects.Remove(candidate.Offset);
                    binaryObject = candidate;
                    if ((binaryObject.PhySize - minPhySize) >= GranularSize)
                    {
                        // split: keep the tail of the slot on the free list
                        BinaryObject splitObject = new BinaryObject(binaryObject.Offset + minPhySize, binaryObject.PhySize - minPhySize);
                        binaryObject.PhySize = minPhySize;
                        WriteBinaryObject(splitObject);   // persist the remainder's header
                        unusedBinaryObjects.Add(splitObject.Offset, splitObject);
                    }
                    return true;
                }
            }
            binaryObject = null;
            return false;
        }
        /// <summary>
        /// Returns a slot to the free list (keyed by file offset) and coalesces it with
        /// physically adjacent free neighbors. On merge with the preceding neighbor the
        /// ref parameter is replaced by the merged region, so callers persist the right header.
        /// </summary>
        /// <param name="binaryObject">slot to free; may be replaced by a merged region</param>
        private void PushUnusedBinaryObject(ref BinaryObject binaryObject)
        {
            binaryObject.UID = Guid.Empty;   // Guid.Empty marks the slot as unused
            unusedBinaryObjects.Add(binaryObject.Offset, binaryObject);
            // merge with the free neighbor immediately before this slot, if contiguous
            if (unusedBinaryObjects.TryGetPreviousValue(binaryObject.Offset, out BinaryObject previousObject))
            {
                if ((previousObject.Offset + previousObject.PhySize) == binaryObject.Offset)
                {
                    previousObject.PhySize += binaryObject.PhySize;
                    unusedBinaryObjects.Remove(binaryObject.Offset);
                    binaryObject = previousObject;
                }
            }
            // merge with the free neighbor immediately after this (possibly already merged) slot
            if (unusedBinaryObjects.TryGetNextValue(binaryObject.Offset, out BinaryObject nextObject))
            {
                if ((binaryObject.Offset + binaryObject.PhySize) == nextObject.Offset)
                {
                    binaryObject.PhySize += nextObject.PhySize;
                    unusedBinaryObjects.Remove(nextObject.Offset);
                    // NOTE(review): the absorbed neighbor's on-disk header is not rewritten here;
                    // presumably the caller's WriteBinaryObject of the merged region suffices — verify
                    // against the file-load/scan logic.
                }
            }
        }
private bool AppendBinaryObject(int minPhySize,out BinaryObject binaryObject)
{
minPhySize = (minPhySize + GranularMask) & ~GranularMask;
binaryObject = new BinaryObject(appendOffset, minPhySize);
appendOffset += minPhySize;
return true;
}
        /// <summary>
        /// Closes the underlying storage file.
        /// NOTE(review): lock(this) matches the locking convention used throughout this class,
        /// but locking on a publicly reachable instance is fragile — consider a private gate object.
        /// </summary>
        public void Close()
        {
            lock (this)
            {
                storageFile.Close();
            }
        }
public void Flush() => storageFile.Flush();
public void Dispose()
{
storageFile?.Dispose();
storageFile = null;
}
}
}