From 2ea02f863300f0d7dac196eea0585c7db4d3ade1 Mon Sep 17 00:00:00 2001 From: Harald Wolff-Thobaben Date: Wed, 18 Nov 2020 00:24:25 +0100 Subject: [PATCH] Initial Commit --- .gitignore | 41 ++ ObjectCache.cs | 40 ++ ObjectStore.cs | 351 ++++++++++++++ catalog/ODBBool.cs | 43 ++ catalog/ODBByteBuffer.cs | 52 +++ catalog/ODBDouble.cs | 28 ++ catalog/ODBEntity.cs | 208 +++++++++ catalog/ODBGuid.cs | 24 + catalog/ODBInteger.cs | 42 ++ catalog/ODBList.cs | 132 ++++++ catalog/ODBLong.cs | 63 +++ catalog/ODBNull.cs | 24 + catalog/ODBObject.cs | 152 ++++++ catalog/ODBStringValue.cs | 22 + catalog/ODBTypedValue.cs | 84 ++++ catalog/ODBValue.cs | 75 +++ collections/LazyList.cs | 105 +++++ index/Index.cs | 25 + index/IndexLeaf.cs | 55 +++ index/SimpleIndex.cs | 57 +++ ln.objects.csproj | 20 + ng/Document.cs | 189 ++++++++ ng/IODBMapping.cs | 10 + ng/IdentityCache.cs | 56 +++ ng/Mapper.API.cs | 220 +++++++++ ng/Mapper.cs | 302 ++++++++++++ ng/ObjectCollection.cs | 307 ++++++++++++ ng/Query.cs | 196 ++++++++ ng/Reference.cs | 36 ++ ng/diff/Diff.cs | 57 +++ ng/diff/DocumentDiff.cs | 60 +++ ng/diff/ListDiff.cs | 102 ++++ ng/index/Index.cs | 34 ++ ng/index/IndexPath.cs | 435 ++++++++++++++++++ ng/index/Path.cs | 66 +++ ng/index/SimpleIndex.cs | 108 +++++ ng/mappings/ClassMapping.cs | 225 +++++++++ ng/mappings/DictionaryMapping.cs | 84 ++++ ng/mappings/ListMapping.cs | 146 ++++++ ng/mappings/ReferenceMapping.cs | 45 ++ ng/mappings/SimpleMapping.cs | 26 ++ ng/storage/IStorage.cs | 64 +++ ng/storage/IStorageContainer.cs | 23 + ng/storage/OrganizedFile.cs | 227 +++++++++ ng/storage/OrganizedFileType.cs | 9 + ng/storage/SegmentedFile.cs | 308 +++++++++++++ ng/storage/StorageArea.cs | 45 ++ ng/storage/StorageAreaContainer.cs | 79 ++++ ng/storage/bases/ChainedStorage.cs | 43 ++ ng/storage/bases/StorageBase.cs | 60 +++ ng/storage/cache/CachingStorage.cs | 97 ++++ ng/storage/fs/FSStorageContainer.cs | 145 ++++++ ng/storage/fs/SegmentedFileStorage.cs | 375 +++++++++++++++ ng/storage/session/SessionStorage.cs | 84 ++++ ng/storage/session/SessionStorageContainer.cs | 67 +++ query/Criterion.cs | 11 + query/EqualsCriterion.cs | 13 + serialization/Deserializer.cs | 39 ++ serialization/Serializer.cs | 40 ++ serialization/binary/BinaryDeserializer.cs | 246 ++++++++++ serialization/binary/BinarySerializer.cs | 164 +++++++ serialization/json/JSONDeserializer.cs | 75 +++ serialization/json/JSONSerializer.cs | 67 +++ serialization/json/LazyListMapping.cs | 51 ++ storage/BinaryObject.cs | 24 + storage/BinaryObjectCache.cs | 82 ++++ storage/BinaryObjectFile.cs | 296 ++++++++++++ 67 files changed, 7081 insertions(+) create mode 100644 .gitignore create mode 100644 ObjectCache.cs create mode 100644 ObjectStore.cs create mode 100644 catalog/ODBBool.cs create mode 100644 catalog/ODBByteBuffer.cs create mode 100644 catalog/ODBDouble.cs create mode 100644 catalog/ODBEntity.cs create mode 100644 catalog/ODBGuid.cs create mode 100644 catalog/ODBInteger.cs create mode 100644 catalog/ODBList.cs create mode 100644 catalog/ODBLong.cs create mode 100644 catalog/ODBNull.cs create mode 100644 catalog/ODBObject.cs create mode 100644 catalog/ODBStringValue.cs create mode 100644 catalog/ODBTypedValue.cs create mode 100644 catalog/ODBValue.cs create mode 100644 collections/LazyList.cs create mode 100644 index/Index.cs create mode 100644 index/IndexLeaf.cs create mode 100644 index/SimpleIndex.cs create mode 100644 ln.objects.csproj create mode 100644 ng/Document.cs create mode 100644 ng/IODBMapping.cs create mode 100644 ng/IdentityCache.cs create 
mode 100644 ng/Mapper.API.cs create mode 100644 ng/Mapper.cs create mode 100644 ng/ObjectCollection.cs create mode 100644 ng/Query.cs create mode 100644 ng/Reference.cs create mode 100644 ng/diff/Diff.cs create mode 100644 ng/diff/DocumentDiff.cs create mode 100644 ng/diff/ListDiff.cs create mode 100644 ng/index/Index.cs create mode 100644 ng/index/IndexPath.cs create mode 100644 ng/index/Path.cs create mode 100644 ng/index/SimpleIndex.cs create mode 100644 ng/mappings/ClassMapping.cs create mode 100644 ng/mappings/DictionaryMapping.cs create mode 100644 ng/mappings/ListMapping.cs create mode 100644 ng/mappings/ReferenceMapping.cs create mode 100644 ng/mappings/SimpleMapping.cs create mode 100644 ng/storage/IStorage.cs create mode 100644 ng/storage/IStorageContainer.cs create mode 100644 ng/storage/OrganizedFile.cs create mode 100644 ng/storage/OrganizedFileType.cs create mode 100644 ng/storage/SegmentedFile.cs create mode 100644 ng/storage/StorageArea.cs create mode 100644 ng/storage/StorageAreaContainer.cs create mode 100644 ng/storage/bases/ChainedStorage.cs create mode 100644 ng/storage/bases/StorageBase.cs create mode 100644 ng/storage/cache/CachingStorage.cs create mode 100644 ng/storage/fs/FSStorageContainer.cs create mode 100644 ng/storage/fs/SegmentedFileStorage.cs create mode 100644 ng/storage/session/SessionStorage.cs create mode 100644 ng/storage/session/SessionStorageContainer.cs create mode 100644 query/Criterion.cs create mode 100644 query/EqualsCriterion.cs create mode 100644 serialization/Deserializer.cs create mode 100644 serialization/Serializer.cs create mode 100644 serialization/binary/BinaryDeserializer.cs create mode 100644 serialization/binary/BinarySerializer.cs create mode 100644 serialization/json/JSONDeserializer.cs create mode 100644 serialization/json/JSONSerializer.cs create mode 100644 serialization/json/LazyListMapping.cs create mode 100644 storage/BinaryObject.cs create mode 100644 storage/BinaryObjectCache.cs create mode 100644 storage/BinaryObjectFile.cs diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..cd51a9f --- /dev/null +++ b/.gitignore @@ -0,0 +1,41 @@ +# Autosave files +*~ + +# build +[Oo]bj/ +[Bb]in/ +packages/ +TestResults/ + +# globs +Makefile.in +*.DS_Store +*.sln.cache +*.suo +*.cache +*.pidb +*.userprefs +*.usertasks +config.log +config.make +config.status +aclocal.m4 +install-sh +autom4te.cache/ +*.user +*.tar.gz +tarballs/ +test-results/ +Thumbs.db +.vs/ + +# Mac bundle stuff +*.dmg +*.app + +# resharper +*_Resharper.* +*.Resharper + +# dotCover +*.dotCover diff --git a/ObjectCache.cs b/ObjectCache.cs new file mode 100644 index 0000000..6b1f72d --- /dev/null +++ b/ObjectCache.cs @@ -0,0 +1,40 @@ +using ln.collections; +using System; +using System.Collections.Generic; +using System.Text; + +namespace ln.objects +{ + class ObjectCache + { + WeakKeyReferenceDictionary guidLookup = new WeakKeyReferenceDictionary(); + WeakValueDictionary objectLookup = new WeakValueDictionary(); + + public ObjectCache() + { } + + public object this[Guid uid] => objectLookup[uid]; + public Guid this[object o] => guidLookup[o]; + + public bool TryGetUID(object o, out Guid uid) => guidLookup.TryGetValue(o, out uid); + public bool TryGetObject(Guid uid, out object o) => objectLookup.TryGetValue(uid, out o); + + public void Add(Guid uid, object o) + { + guidLookup.Add(o, uid); + objectLookup.Add(uid, o); + } + + public void Remove(Guid uid) + { + guidLookup.Remove(objectLookup[uid]); + objectLookup.Remove(uid); + } + public void Remove(object 
o)
+        {
+            objectLookup.Remove(guidLookup[o]);
+            guidLookup.Remove(o);
+        }
+
+    }
+}
diff --git a/ObjectStore.cs b/ObjectStore.cs
new file mode 100644
index 0000000..168612c
--- /dev/null
+++ b/ObjectStore.cs
@@ -0,0 +1,351 @@
+using ln.logging;
+using ln.objects.index;
+using ln.objects.serialization;
+using ln.objects.storage;
+using ln.type;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.Serialization;
+using ln.objects.serialization.binary;
+using ln.collections;
+using System;
+
+namespace ln.objects
+{
+    public class ObjectStore : IDisposable
+    {
+        public static bool DEBUG;
+
+        public static Guid TYPECACHE_GUID = new Guid("00000000-0000-0000-0001-000000000001");
+        public static Guid SPECIAL_GUID_LIMIT = new Guid("00000000-0000-0000-0002-000000000000");
+
+        public String FileName => objectFile.FileName;
+
+        public Factory<ObjectStore, Serializer> SerializerFactory { get; }
+        public Factory<ObjectStore, Deserializer> DeserializerFactory { get; }
+
+        Deserializer defaultDeserializer;
+
+        BinaryObjectFile objectFile;
+        ObjectCache objectCache;
+        BTreeValueSet<Type, Guid> objectTypeCache;
+
+        Dictionary<Type, IndexLeaf> indeces = new Dictionary<Type, IndexLeaf>();
+
+        SaveTransaction currentTransaction;
+
+        public ObjectStore(String filename)
+            : this(filename, () => new BinarySerializer(), () => new BinaryDeserializer())
+        {
+        }
+        public ObjectStore(string filename, Factory<Serializer> serializerFactory, Factory<Deserializer> deserializerFactory)
+            : this(filename, (o) => serializerFactory(), (o) => deserializerFactory())
+        {
+        }
+        public ObjectStore(string filename, Factory<ObjectStore, Serializer> serializerFactory, Factory<ObjectStore, Deserializer> deserializerFactory)
+        {
+            SerializerFactory = serializerFactory;
+            DeserializerFactory = deserializerFactory;
+
+            objectFile = new BinaryObjectFile(filename);
+            objectCache = new ObjectCache();
+        }
+
+        public void Open()
+        {
+            lock (this)
+            {
+                objectFile.Open();
+
+                defaultDeserializer = DeserializerFactory(this);
+                defaultDeserializer.OnLookupObjectByReference += LookupObjectByReference;
+
+                if (objectFile.TryReadBinaryObject(TYPECACHE_GUID, out byte[] typeCacheBytes))
+                {
+                    object otc = null;
+                    if (defaultDeserializer.DeserializeObject(typeCacheBytes, ref otc))
+                    {
+                        KeyValuePair<string, Guid>[] typeCacheValues = (KeyValuePair<string, Guid>[])otc;
+                        objectTypeCache = new BTreeValueSet<Type, Guid>();
+                        objectTypeCache.AddRange(typeCacheValues.Select((kvp) => new KeyValuePair<Type, Guid>(Type.GetType(kvp.Key), kvp.Value)));
+
+                        objectFile.RemoveBinaryObjects(TYPECACHE_GUID);
+                        objectFile.Flush();
+                    }
+                    else
+                    {
+                        objectFile.RemoveBinaryObjects(TYPECACHE_GUID);
+                        objectFile.Flush();
+
+                        objectTypeCache = new BTreeValueSet<Type, Guid>();
+
+                        foreach (Guid uid in objectFile.ObjectUIDs)
+                        {
+                            if (objectFile.TryReadBinaryObject(uid, out byte[] serializedBytes))
+                            {
+                                if (defaultDeserializer.TryGetType(serializedBytes, out Type type))
+                                {
+                                    objectTypeCache.TryAdd(type, uid);
+                                }
+                            }
+                        }
+                    }
+                }
+                else
+                {
+                    objectTypeCache = new BTreeValueSet<Type, Guid>();
+                }
+            }
+        }
+        public void Close()
+        {
+            lock (this)
+            {
+                if (objectFile != null)
+                {
+                    KeyValuePair<string, Guid>[] typeCacheValues = objectTypeCache.GetKeyValuePairs().Select((kvp) => new KeyValuePair<string, Guid>(kvp.Key.GetSimpleQualifiedName(), kvp.Value)).ToArray();
+
+                    if (SerializerFactory(this).SerializeObject(typeCacheValues, out byte[] typeCacheBytes))
+                    {
+                        objectFile.WriteBinaryObject(TYPECACHE_GUID, typeCacheBytes);
+                        objectFile.Flush();
+                    }
+
+                    objectFile.Dispose();
+                    objectFile = null;
+                }
+            }
+        }
+
+        public T LoadObject<T>(Guid uid) => (T)LoadObject(uid, typeof(T));
+        public object LoadObject(Guid uid, Type type)
+        {
+            lock (this)
+            {
+                if (!objectCache.TryGetObject(uid, out object o))
+                {
+                    o = Activator.CreateInstance(type,
true); + objectCache.Add(uid, o); + + byte[] boData = objectFile.ReadBinaryObject(uid); + + if (!defaultDeserializer.DeserializeObject(boData, ref o)) + throw new Exception("unable to deserialize"); + } + return o; + } + } + + public IEnumerable LoadObjects(Type type) + { + foreach (Guid uid in objectTypeCache[type]) + { + yield return LoadObject(uid, type); + } + } + public IEnumerable LoadObjects() + { + foreach (Guid uid in objectTypeCache[typeof(T)]) + { + yield return LoadObject(uid); + } + } + + + private bool LookupObjectByReference(object reference, Type targetType, out object o) + { + if (DEBUG) + Logging.Log(LogLevel.DEBUG, "LookupObjectReference: {0} [ {1} ]", reference, targetType.Name); + + o = LoadObject((Guid)reference, targetType); + return true; + } + + private void TransactionalAction(SaveTransaction saveTransaction, Action action) + { + lock (this) + { + SaveTransaction oldTransaction = currentTransaction; + currentTransaction = saveTransaction; + + try + { + action(); + } + finally + { + currentTransaction = oldTransaction; + } + } + } + + public Guid SaveObject(object o) => SaveObject(GetObjectUID(o), o); + public Guid SaveObject(Guid uid, object o) + { + lock (this) + { + SaveTransaction saveTransaction = new SaveTransaction(this); + + TransactionalAction(saveTransaction, () => PrepareSave(saveTransaction, uid, o)); + + foreach (KeyValuePair bo in saveTransaction.BinaryObjects) + if (bo.Value != null) + { + if (DEBUG) + Logging.Log(LogLevel.DEBUG, "Storing {0} [ {1} ]", bo.Key, objectCache[bo.Key].GetType()); + objectFile.WriteBinaryObject(bo.Key, bo.Value); + } + else if (DEBUG) + Logging.Log(LogLevel.DEBUG, "Ignoring unchanged {0} [ {1} ]", bo.Key, objectCache[bo.Key].GetType()); + + return uid; + } + } + + private void PrepareSave(SaveTransaction saveTransaction,Guid uid, object o) + { + saveTransaction.PushReferencedObjects(); + + if (!saveTransaction.Serializer.SerializeObject(o, out byte[] serializedBytes)) + throw new SerializationException(); + + if (!Object.ReferenceEquals(null, o)) + { + objectTypeCache.TryAdd(o.GetType(), uid); + GetIndexLeaf(o.GetType()).Reindex(uid, o); + } + + if (objectFile.TryReadBinaryObject(uid, out byte[] storedBytes) && storedBytes.AreEqual(serializedBytes)) + { + saveTransaction.BinaryObjects.Add(uid, null); + saveTransaction.PopReferencedObjects(); + } + else + { + saveTransaction.BinaryObjects.Add(uid, serializedBytes); + saveTransaction.PopReferencedObjectsToFinal(); + } + + foreach (object ro in saveTransaction.ReferencedObjects.ToArray()) + { + Guid oUid = GetObjectUID(ro); + if (!saveTransaction.BinaryObjects.ContainsKey(oUid)) + PrepareSave(saveTransaction, oUid, ro); + } + } + + public Guid GetObjectUID(object o) + { + lock (this) + { + if (!objectCache.TryGetUID(o, out Guid uid)) + { + uid = Guid.NewGuid(); + objectCache.Add(uid, o); + } + return uid; + } + } + + private IndexLeaf GetIndexLeaf(Type type) + { + if (!indeces.TryGetValue(type,out IndexLeaf indexLeaf)) + { + indexLeaf = new IndexLeaf(type, null, (o) => o); + indeces.Add(type, indexLeaf); + } + return indexLeaf; + } + + public bool EnsureIndeces(Type type,IEnumerable> indexDefinitions) + { + bool rebuild = false; + + foreach (KeyValuePair indexDefinition in indexDefinitions) + rebuild |= EnsureIndex(type, indexDefinition.Key, indexDefinition.Value, false); + + if (rebuild) + RebuildIndeces(); + + return rebuild; + } + + public IEnumerable QueryObjects(Type type, string path, Func criterion) => QueryUids(type, path, criterion).Select((uid) => LoadObject(uid, 
type)); + public IEnumerable QueryUids(Type type,string path,Func criterion) + { + HashSet result = new HashSet(); + GetIndexLeaf(type).GetLeaf(path).Match(criterion, result); + return result; + } + + public bool EnsureIndex(Type type, string path, index.Index index) => EnsureIndex(type, path, index, true); + private bool EnsureIndex(Type type, string path, index.Index index, bool rebuild) + { + IndexLeaf indexLeaf = GetIndexLeaf(type).GetLeaf(path); + if (index.GetType().Equals(indexLeaf.Index)) + return false; + + indexLeaf.Index = index; + + if (rebuild) + RebuildIndeces(); + + return true; + } + + private void RebuildIndeces() + { + foreach (Type type in objectTypeCache.Keys) + { + IndexLeaf indexLeaf = GetIndexLeaf(type); + indexLeaf.Clear(); + foreach (object value in LoadObjects(type)) + indexLeaf.Reindex(GetObjectUID(value), value); + } + } + + + public void Dispose() + { + Close(); + } + + class SaveTransaction + { + public ObjectStore ObjectStore { get; } + public Serializer Serializer { get; } + + public HashSet ReferencedObjects { get; } = new HashSet(); + + public Stack> referencedObjectsStack { get; } = new Stack>(); + HashSet currentReferencedObjectsSet => referencedObjectsStack.Peek(); + + public Dictionary BinaryObjects { get; } = new Dictionary(); + + public SaveTransaction(ObjectStore objectStore) + { + ObjectStore = objectStore; + Serializer = ObjectStore.SerializerFactory(objectStore); + Serializer.OnLookupReference += LookupReference; + } + + public bool LookupReference(object value, out object reference) + { + reference = ObjectStore.GetObjectUID(value); + currentReferencedObjectsSet.Add(value); + return true; + } + + public void PushReferencedObjects() => referencedObjectsStack.Push(new HashSet()); + public void PopReferencedObjects() => referencedObjectsStack.Pop(); + public void PopReferencedObjectsToFinal() + { + HashSet ro = referencedObjectsStack.Pop(); + foreach (object o in ro) + ReferencedObjects.Add(o); + } + } + + } +} diff --git a/catalog/ODBBool.cs b/catalog/ODBBool.cs new file mode 100644 index 0000000..44f4804 --- /dev/null +++ b/catalog/ODBBool.cs @@ -0,0 +1,43 @@ +// /** +// * File: ODBBool.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. +// * +// * +// **/ +using System; + +namespace ln.objects.catalog +{ + public class ODBBool : ODBValue + { + public static ODBBool True = new ODBBool(true); + public static ODBBool False = new ODBBool(false); + + bool isTrue; + + private ODBBool(bool b) + : base(0x04, b) + { + isTrue = b; + } + + public override byte[] Serialize() => new byte[] { isTrue ? (byte)0xFF : (byte)0x00 }; + + protected override int compare(ODBEntity other) + { + if (ReferenceEquals(this, other)) + return 0; + if (isTrue) + return 1; + return -1; + } + + static ODBBool() + { + RegisterDeserializer(0x04, (b, o, l) => b[o] != 0 ? True : False); + } + } +} diff --git a/catalog/ODBByteBuffer.cs b/catalog/ODBByteBuffer.cs new file mode 100644 index 0000000..9e40999 --- /dev/null +++ b/catalog/ODBByteBuffer.cs @@ -0,0 +1,52 @@ +// /** +// * File: ODBByteBuffer.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. 
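A minimal usage sketch of the ObjectStore API above. The Person class, file name, and values are hypothetical, and the generic overloads are written in the LoadObject<T>/LoadObjects<T> shape that the typeof(T) forwarding inside ObjectStore implies; treat this as a sketch, not documented API. Note that LoadObject materializes instances through a non-public parameterless constructor (Activator.CreateInstance(type, true)), so persisted classes should provide one:

    using System;
    using ln.objects;

    class Person
    {
        public string Name;
        public int Age;
        private Person() { }    // used by LoadObject when materializing
        public Person(string name, int age) { Name = name; Age = age; }
    }

    static class ObjectStoreDemo
    {
        static void Main()
        {
            using (ObjectStore store = new ObjectStore("people.odb"))
            {
                store.Open();

                Guid uid = store.SaveObject(new Person("Alice", 42));   // serializes the object and anything it references
                Person alice = store.LoadObject<Person>(uid);           // repeat loads are served from the ObjectCache

                foreach (Person p in store.LoadObjects<Person>())       // enumerates all stored Persons via the type cache
                    Console.WriteLine(p.Name);
            }   // Dispose() calls Close(), which persists the type cache under TYPECACHE_GUID
        }
    }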
+// * +// * +// **/ +using ln.type; +using System; +namespace ln.objects.catalog +{ + public class ODBByteBuffer : ODBValue + { + public byte[] GetBytes() => (byte[])Value; + + + public ODBByteBuffer(byte[] bytes) + : base(0x0800, bytes.Slice(0)) + { + } + + public override byte[] Serialize() => GetBytes(); + protected override int compare(ODBEntity other) + { + ODBByteBuffer you = other as ODBByteBuffer; + + byte[] myBytes = GetBytes(); + byte[] yourBytes = you.GetBytes(); + + int dl = myBytes.Length - yourBytes.Length; + + if (dl != 0) + return dl; + + while (dl < myBytes.Length) + { + int d = myBytes[dl] - yourBytes[dl++]; + if (d != 0) + return d; + } + + return 0; + } + + static ODBByteBuffer() + { + RegisterDeserializer(0x0800, (storageBytes, offset, length) => new ODBByteBuffer(storageBytes.Slice(offset, length))); + } + } +} diff --git a/catalog/ODBDouble.cs b/catalog/ODBDouble.cs new file mode 100644 index 0000000..ed9a5bc --- /dev/null +++ b/catalog/ODBDouble.cs @@ -0,0 +1,28 @@ +using System; +namespace ln.objects.catalog +{ + public class ODBDouble : ODBValue + { + public ODBDouble(double value) + :base(0x18,value) + {} + + public override byte[] Serialize() => BitConverter.GetBytes((double)Value); + protected override int compare(ODBEntity other) + { + double a = (double)Value; + double b = (double)(other as ODBValue).Value; + + if (Math.Abs(a - b) < double.Epsilon) + return 0; + if (a < b) + return -1; + return 1; + } + + static ODBDouble() + { + RegisterDeserializer(0x0018, (b, o, l) => new ODBDouble(BitConverter.ToDouble(b, o))); + } + } +} diff --git a/catalog/ODBEntity.cs b/catalog/ODBEntity.cs new file mode 100644 index 0000000..d6cb67a --- /dev/null +++ b/catalog/ODBEntity.cs @@ -0,0 +1,208 @@ +using ln.type; +using System; +using System.Collections.Generic; +using System.IO; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Text; + +/** + * typeCode list + * + * 0x0000 ODBNull + * 0x0001 ODBStringValue + * 0x0002 ODBList + * 0x0003 ODBGuid + * 0x0004 ODBBool + * 0x0005 ODBObject + * + * 0x0010 ODBInteger + * 0x0011 ODBUInteger + * 0x0012 ODBLong + * 0x0013 ODBULong + * + * 0x0018 ODBDouble + * + * 0x0020 ODBTypedMapping + * + * 0x0800 ODBByteBuffer + * + * 0x1000 ODBDocument + * 0x1001 Document (ln.types.odb.ng) + * + * + * + **/ + + + +namespace ln.objects.catalog +{ + public delegate ODBEntity ODBValueFactory(object value); + public delegate ODBEntity ODBDeserialize(byte[] storageBytes, int offset, int length); + + /// + /// ODBEntity. The base of all ODB types. + /// + /// The data model used by ODB can be described as follows: + /// + /// Each piece of information to be used with ODB may be called an entity. + /// ODB provides support for several types of entities (e.g. numbers, strings, documents, lists,...) + /// An entity has an identity and a state. + /// Two entities of same type having the same identity are considered to be "equal". + /// Two entities of same type having the same identity may represent different states of the same entity. (different "Versions", e.g. 
an old and an up-to-date version of the same entity.)
+    ///
+    /// ODBEntity instances
+    /// - define identity through Identity
+    /// - may be seen as a "container" holding a certain state
+    ///
+    /// ODBValue instances
+    /// extend ODBEntity
+    /// - implement Identity to return itself
+    /// - implement CompareTo as a simple "subtraction"
+    ///
+    /// other ODBEntity subtypes
+    /// - implement a read-only Identity that returns an ODBValue instance
+    /// - implement CompareTo type-specifically, without a defined semantic meaning
+    ///
+    public abstract class ODBEntity : IComparable<ODBEntity>
+    {
+        int storageTypeCode;
+
+        /// <summary>
+        /// Gets the identity of this entity.
+        /// </summary>
+        /// <value>The identity.</value>
+        public virtual ODBValue Identity { get; }
+
+        /// <summary>
+        /// Independently clone this instance.
+        /// </summary>
+        /// <remarks>
+        /// For immutable values this returns the instance itself.
+        /// Complex ODBEntities will return a copy of themselves that is completely independent of the source.
+        /// </remarks>
+        /// <returns>The clone.</returns>
+        public abstract ODBEntity Clone();
+
+        /// <summary>
+        /// Implements the internal comparison within the same subclass of ODBEntity.
+        /// </summary>
+        /// <returns>The comparison result.</returns>
+        /// <param name="other">Other.</param>
+        protected abstract int compare(ODBEntity other);
+
+        protected ODBEntity(int storageTypeCode)
+        {
+            this.storageTypeCode = storageTypeCode;
+        }
+
+        public int CompareTo(ODBEntity other)
+        {
+            if (storageTypeCode != other.storageTypeCode)
+                return storageTypeCode - other.storageTypeCode;
+            return compare(other);
+        }
+
+        public abstract byte[] Serialize();
+        public virtual void Serialize(BinaryWriter storage)
+        {
+            byte[] storageBytes = Serialize();
+
+            storage.Write(storageTypeCode);
+            storage.Write(storageBytes.Length);
+            storage.Write(storageBytes, 0, storageBytes.Length);
+        }
+
+        public override int GetHashCode() => Identity.GetHashCode();
+        public override bool Equals(object obj)
+        {
+            if (Equals(GetType(), obj.GetType()) && obj is ODBEntity)
+                return Equals(Identity, (obj as ODBEntity).Identity);
+            return false;
+        }
+
+        public static bool operator <(ODBEntity a, ODBEntity b) => a.CompareTo(b) < 0;
+        public static bool operator >(ODBEntity a, ODBEntity b) => a.CompareTo(b) > 0;
+        public static bool operator <=(ODBEntity a, ODBEntity b) => a.CompareTo(b) <= 0;
+        public static bool operator >=(ODBEntity a, ODBEntity b) => a.CompareTo(b) >= 0;
+        public static bool operator ==(ODBEntity a, ODBEntity b) => a is null ? b is null : a.CompareTo(b) == 0;
+        public static bool operator !=(ODBEntity a, ODBEntity b) => a is null ?
!(b is null) : a.CompareTo(b) != 0; + + //public static implicit operator ODBEntity(ValueType v) + //{ + // return Mapper.Default.MapValue(v); + //} + //public static implicit operator ODBEntity(String v) + //{ + // return Mapper.Default.MapValue(v); + //} + + //public static ODBEntity FromNative(object v) + //{ + // return Mapper.Default.MapValue(v); + //} + + static Dictionary valueDeserializers = new Dictionary(); + public static void RegisterDeserializer(int storageTypeCode, ODBDeserialize deserialize) + { + valueDeserializers.Add(storageTypeCode, deserialize); + } + + public static ODBEntity Deserialize(byte[] buffer, ref int offset) + { + int storageTypeCode = BitConverter.ToInt32(buffer, offset); + int storageLength = BitConverter.ToInt32(buffer, offset + 4); + if (!valueDeserializers.ContainsKey(storageTypeCode)) + throw new KeyNotFoundException(string.Format("StorageTypeCode 0x{0:x8} at offset 0x{1:x8}", storageTypeCode, offset)); + + ODBEntity value = valueDeserializers[storageTypeCode](buffer, offset + 8, storageLength); + offset += 8 + storageLength; + return value; + } + + public static ODBEntity Deserialize(Stream stream) + { + int storageTypeCode = stream.ReadInteger(); + int storageLength = stream.ReadInteger(); + byte[] b = new byte[storageLength]; + stream.Read(b, 0, storageLength); + + if (valueDeserializers.ContainsKey(storageTypeCode)) + return valueDeserializers[storageTypeCode](b, 0, storageLength); + else + throw new FormatException("wrong storage type code"); + } + + public override string ToString() + { + return string.Format("[{0} Identity={1}]", GetType().Name, Identity); + } + + public string TreeString => ToTreeString(0); + public abstract string ToTreeString(int indent); + + static ODBEntity() + { + RuntimeHelpers.RunClassConstructor(typeof(ODBNull).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBObject).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBList).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBStringValue).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBInteger).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBUInteger).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBLong).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBULong).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBDouble).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBGuid).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBBool).TypeHandle); + RuntimeHelpers.RunClassConstructor(typeof(ODBByteBuffer).TypeHandle); + } + } +} diff --git a/catalog/ODBGuid.cs b/catalog/ODBGuid.cs new file mode 100644 index 0000000..987d7a3 --- /dev/null +++ b/catalog/ODBGuid.cs @@ -0,0 +1,24 @@ +using ln.type; +using System; +using System.Linq; +namespace ln.objects.catalog +{ + public class ODBGuid : ODBValue + { + public ODBGuid() + : base(0x03, Guid.NewGuid()) + { } + public ODBGuid(Guid guid) + : base(0x03, guid) + { } + + public override byte[] Serialize() => ((Guid)Value).ToByteArray(); + protected override int compare(ODBEntity other) => ((Guid)Value).CompareTo((other as ODBGuid).Value); + + static ODBGuid() + { + RegisterDeserializer(0x03, (b, o, l) => new ODBGuid(new Guid(b.Slice(o, 16)))); + } + + } +} diff --git a/catalog/ODBInteger.cs b/catalog/ODBInteger.cs new file mode 100644 index 0000000..9714f9a --- /dev/null +++ b/catalog/ODBInteger.cs @@ -0,0 +1,42 @@ +using System; +using System.Runtime.CompilerServices; +namespace ln.objects.catalog +{ + public class ODBInteger : ODBValue 
+ { + public ODBInteger(int i) + : base(0x10, i) + { } + public override byte[] Serialize() => BitConverter.GetBytes((int)Value); + protected override int compare(ODBEntity other) => (int)Value - (int)(other as ODBValue).Value; + + static ODBInteger() + { + RegisterDeserializer(0x10, (b, o, l) => new ODBInteger(BitConverter.ToInt32(b, o))); + } + + } + public class ODBUInteger : ODBValue + { + public ODBUInteger(uint i) + : base(0x11, i) + { } + + public override byte[] Serialize() => BitConverter.GetBytes((uint)Value); + protected override int compare(ODBEntity other) + { + long d = Convert.ToInt64((uint)Value) - Convert.ToInt64((uint)(other as ODBValue).Value); + if (d == 0) + return 0; + if (d < 0) + return -1; + return 1; + } + + static ODBUInteger() + { + RegisterDeserializer(0x11, (b, o, l) => new ODBUInteger(BitConverter.ToUInt32(b, o))); + } + + } +} diff --git a/catalog/ODBList.cs b/catalog/ODBList.cs new file mode 100644 index 0000000..245a502 --- /dev/null +++ b/catalog/ODBList.cs @@ -0,0 +1,132 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Collections; +using System.Text; +using ln.type; + +namespace ln.objects.catalog +{ + public class ODBList : ODBEntity, IEnumerable + { + Guid identity = Guid.NewGuid(); + public override ODBValue Identity => new ODBGuid(identity); + + List items = new List(); + + public ODBList() + : base(0x02) + { + } + public ODBList(byte[] bytes, int offset, int length) + : this() + { + MemoryStream stream = new MemoryStream(bytes, offset, length); + identity = new Guid(stream.ReadBytes(16)); + + int nItems = stream.ReadInteger(); + for (int n = 0; n < nItems; n++) + items.Add(Deserialize(stream)); + } + + public ODBEntity this[int i] + { + get => items[i]; + set => items[i] = value; + } + + public void AddRange(IEnumerable values) + { + foreach (ODBEntity value in values) + Add(value); + } + + public void Add(ODBEntity value) + { + items.Add(value); + } + public void Remove(ODBEntity value) + { + items.Remove(value); + } + public void RemoveAt(int i) + { + items.RemoveAt(i); + } + + public int Count => items.Count; + + public override ODBEntity Clone() + { + ODBList clone = new ODBList(); + clone.identity = identity; + + foreach (ODBEntity item in items) + clone.items.Add(item.Clone()); + + return clone; + } + + public override byte[] Serialize() + { + MemoryStream stream = new MemoryStream(); + BinaryWriter writer = new BinaryWriter(stream); + + writer.Write(identity.ToByteArray()); + writer.Write(items.Count); + + foreach (ODBEntity value in items) + value.Serialize(writer); + + return stream.ToArray(); + } + + protected override int compare(ODBEntity other) + { + ODBList you = other as ODBList; + + int d = Count - you.Count; + if (d != 0) + return d; + + for (int n = 0; n < Count; n++) + { + d = this[n].CompareTo(you[n]); + if (d != 0) + return d; + } + return 0; + } + + public IEnumerator GetEnumerator() + { + return items.GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return items.GetEnumerator(); + } + + static ODBList() + { + RegisterDeserializer(0x02, (b, o, l) => new ODBList(b, o, l)); + } + + public override string ToTreeString(int indent) + { + indent += 2; + StringBuilder stringBuilder = new StringBuilder(); + + stringBuilder.AppendFormat("{0} Identity={1} Count={2}", GetType().Name, Identity, Count); + foreach (ODBEntity entity in items) + { + stringBuilder.AppendLine(); + stringBuilder.AppendFormat("{0}{1}", new string(' ', indent), entity.ToTreeString(indent)); + } + + return 
stringBuilder.ToString(); + } + + } +} diff --git a/catalog/ODBLong.cs b/catalog/ODBLong.cs new file mode 100644 index 0000000..f2c9033 --- /dev/null +++ b/catalog/ODBLong.cs @@ -0,0 +1,63 @@ +using System; +using System.Runtime.CompilerServices; +namespace ln.objects.catalog +{ + public class ODBLong : ODBValue + { + public ODBLong(long value) + : base(0x12, value) + { } + + public override byte[] Serialize() => BitConverter.GetBytes((long)Value); + + protected override int compare(ODBEntity other) + { + long a, b; + a = (long)Value; + b = (long)(other as ODBValue).Value; + + long d = a - b; + if (d == 0) + return 0; + if (d < 0) + return -1; + return 1; + } + + public static implicit operator DateTime(ODBLong l) => DateTimeOffset.FromUnixTimeMilliseconds((long)l.Value).DateTime; + public static implicit operator TimeSpan(ODBLong l) => TimeSpan.FromMilliseconds((long)l.Value); + + + static ODBLong() + { + RegisterDeserializer(0x12, (b, o, l) => new ODBLong(BitConverter.ToInt64(b, o))); + } + + } + + public class ODBULong : ODBValue + { + public ODBULong(ulong value) + : base(0x13, value) + { } + + public override byte[] Serialize() => BitConverter.GetBytes((ulong)Value); + protected override int compare(ODBEntity other) + { + ulong a = (ulong)Value; + ulong b = (ulong)(other as ODBValue).Value; + + if (a == b) + return 0; + if (a < b) + return -1; + return 1; + } + + static ODBULong() + { + RegisterDeserializer(0x13, (b, o, l) => new ODBULong(BitConverter.ToUInt64(b, o))); + } + + } +} diff --git a/catalog/ODBNull.cs b/catalog/ODBNull.cs new file mode 100644 index 0000000..941f116 --- /dev/null +++ b/catalog/ODBNull.cs @@ -0,0 +1,24 @@ +using System; +namespace ln.objects.catalog +{ + + public class ODBNull : ODBValue + { + public static readonly ODBNull Instance = new ODBNull(); + + private ODBNull() + : base(0x00, null) + { } + + public override byte[] Serialize() => new byte[0]; + protected override int compare(ODBEntity other) => 0; + + public override int GetHashCode() => 0; + public override bool Equals(object obj) => ReferenceEquals(this, obj); + + static ODBNull() + { + RegisterDeserializer(0x00, (b, o, l) => Instance); + } + } +} diff --git a/catalog/ODBObject.cs b/catalog/ODBObject.cs new file mode 100644 index 0000000..fee0a56 --- /dev/null +++ b/catalog/ODBObject.cs @@ -0,0 +1,152 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; + +namespace ln.objects.catalog +{ + public class ODBObject : ODBEntity + { + private Dictionary properties = new Dictionary(); + + public ODBObject() + :base(0x0005) + { } + + public ODBObject(byte[] bytes) + : this(bytes, 0, bytes.Length) + {} + public ODBObject(byte[] bytes,int offset,int length) + :this() + { + int endOffset = offset + length; + + int nProps = BitConverter.ToInt32(bytes, offset); + offset += 4; + + for (int n=0;n endOffset) + throw new FormatException("object deserialization read behind end of buffer"); + } + + public ODBEntity this[ODBEntity propName] + { + get + { + if (properties.ContainsKey(propName)) + return properties[propName]; + return ODBNull.Instance; + } + set + { + if (ODBNull.Instance.Equals(value)) + { + if (properties.ContainsKey(propName)) + properties.Remove(propName); + } + else + { + properties[propName] = value; + } + } + } + public ODBEntity this[string propName] + { + get => this[new ODBStringValue(propName)]; + set => this[new ODBStringValue(propName)] = value; + } + + public IEnumerable Keys => properties.Keys; + + public bool Contains(string 
propName) => Contains(new ODBStringValue(propName)); + public bool Contains(ODBEntity propName) + { + return !ODBNull.Instance.Equals(this[propName]); + } + + public override ODBEntity Clone() + { + ODBObject clone = new ODBObject(); + foreach (ODBEntity fieldName in properties.Keys) + { + clone[fieldName] = this[fieldName].Clone(); + } + return clone; + } + + public void CloneTo(ODBObject target) + { + target.properties.Clear(); + foreach (ODBEntity fieldName in properties.Keys) + { + target[fieldName] = this[fieldName].Clone(); + } + } + + public override byte[] Serialize() + { + MemoryStream stream = new MemoryStream(); + BinaryWriter writer = new BinaryWriter(stream); + + writer.Write(properties.Count); + + foreach (ODBEntity propName in properties.Keys) + { + ODBEntity propValue = properties[propName]; + propName.Serialize(writer); + propValue.Serialize(writer); + } + + return stream.ToArray(); + } + + public override string ToString() + { + return String.Format("[Object {0}]", String.Join(" ",properties.Select(kv=> String.Format("{0}={1}",kv.Key,kv.Value)))); + } + public override string ToTreeString(int indent) + { + indent += 2; + StringBuilder stringBuilder = new StringBuilder(); + + stringBuilder.AppendFormat("{0} Identity={1} Count={2}", GetType().Name, Identity, properties.Count); + foreach (ODBValue key in properties.Keys) + { + stringBuilder.AppendLine(); + stringBuilder.AppendFormat("{0}{1,-32}: {2}", new String(' ', indent), key, properties[key].ToTreeString(indent)); + } + + return stringBuilder.ToString(); + } + + protected override int compare(ODBEntity e) + { + ODBObject other = e as ODBObject; + ODBEntity[] keys = Keys.Union(other.Keys).ToArray(); + + foreach (ODBEntity key in keys) + { + ODBEntity mine = this[key]; + ODBEntity yours = other[key]; + + int c = mine.CompareTo(yours); + + if (c != 0) + return c; + } + return 0; + } + + static ODBObject() + { + RegisterDeserializer(0x0005, (b,o,l) => new ODBObject(b,o,l)); + } + } +} diff --git a/catalog/ODBStringValue.cs b/catalog/ODBStringValue.cs new file mode 100644 index 0000000..a34e2bc --- /dev/null +++ b/catalog/ODBStringValue.cs @@ -0,0 +1,22 @@ +using System; +using System.Text; +using System.Globalization; + +namespace ln.objects.catalog +{ + public class ODBStringValue : ODBValue + { + + public ODBStringValue(string s) + : base(0x01, s) + { } + + public override byte[] Serialize() => Encoding.UTF8.GetBytes((string)Value); + protected override int compare(ODBEntity other) => ((string)Value).CompareTo((other as ODBValue).Value); + + static ODBStringValue() + { + RegisterDeserializer(0x01, (b, o, l) => new ODBStringValue(Encoding.UTF8.GetString(b, o, l))); + } + } +} diff --git a/catalog/ODBTypedValue.cs b/catalog/ODBTypedValue.cs new file mode 100644 index 0000000..32bbebe --- /dev/null +++ b/catalog/ODBTypedValue.cs @@ -0,0 +1,84 @@ +// /** +// * File: ODBTypedValue.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. 
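A short round-trip sketch for the ODBObject container defined above (the property names are arbitrary). Assigning ODBNull.Instance deletes a property, and Serialize(BinaryWriter) frames the payload as [typeCode][length][bytes] so that ODBEntity.Deserialize can restore it:

    using System.IO;
    using ln.objects.catalog;

    ODBObject person = new ODBObject();
    person["name"] = new ODBStringValue("Alice");
    person["age"]  = new ODBInteger(42);
    person["note"] = new ODBStringValue("temporary");
    person["note"] = ODBNull.Instance;              // assigning ODBNull removes the property again

    MemoryStream stream = new MemoryStream();
    person.Serialize(new BinaryWriter(stream));     // writes [int32 typeCode][int32 length][payload]

    int offset = 0;
    ODBObject copy = (ODBObject)ODBEntity.Deserialize(stream.ToArray(), ref offset);
    // copy.Contains("note") == false; copy["age"].CompareTo(person["age"]) == 0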
+// * +// * +// **/ +using System; +using System.IO; +namespace ln.objects.catalog +{ + //public class ODBTypedValue : ODBEntity + //{ + // public ODBEntity ODBValue { get; private set; } + // public Type TargetType { get; private set; } + + // public override uint AsUInt => ODBValue.AsUInt; + + // internal ODBTypedValue() + // : base(0x0020) + // { + // } + // public ODBTypedValue(Type targetType,ODBEntity value) + // : this() + // { + // TargetType = targetType; + // ODBValue = value; + // } + // public ODBTypedValue(byte[] bytes,int offset,int length) + // : this() + // { + // String aname = ODBEntity.Deserialize(bytes, ref offset).AsString; + // String tname = ODBEntity.Deserialize(bytes, ref offset).AsString; + // ODBValue = ODBEntity.Deserialize(bytes, ref offset); + + // TargetType = Type.GetType(String.Format("{0}, {1}", tname, aname)); + // } + + // public override int CompareLevel => 126; + + // public override int CompareInType(ODBEntity other) + // { + // ODBTypedValue typedValue = other as ODBTypedValue; + // return ODBValue.CompareTo(typedValue.ODBValue); + // } + + // public override byte[] GetStorageBytes() + // { + // MemoryStream stream = new MemoryStream(); + // BinaryWriter writer = new BinaryWriter(stream); + + // ODBStringValue aname = TargetType.Assembly.GetName().Name; + // ODBStringValue tname = TargetType.FullName; + + // aname.Write(writer); + // tname.Write(writer); + // ODBValue.Write(writer); + + // return stream.ToArray(); + // } + + // public override int GetHashCode() + // { + // return ODBValue.GetHashCode(); + // } + // public override bool Equals(object obj) + // { + // if (obj is ODBTypedValue) + // { + // ODBTypedValue typedValue = obj as ODBTypedValue; + // return ODBValue.Equals(typedValue.ODBValue); + // } + // return false; + // } + + + // static ODBTypedValue() + // { + // RegisterDeserializer(0x0020, (b, o, l) => new ODBTypedValue(b,o,l)); + // } + //} +} diff --git a/catalog/ODBValue.cs b/catalog/ODBValue.cs new file mode 100644 index 0000000..84f08d9 --- /dev/null +++ b/catalog/ODBValue.cs @@ -0,0 +1,75 @@ +// /** +// * File: ODBValue.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. 
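A worked example of the ordering contract that ODBEntity.CompareTo establishes for the value types collected here: entities order first by their storage type code (see the typeCode list in ODBEntity.cs), and only entities of the same type fall through to the type-specific compare():

    using ln.objects.catalog;

    ODBEntity i5 = new ODBInteger(5);           // typeCode 0x10
    ODBEntity i7 = new ODBInteger(7);           // typeCode 0x10
    ODBEntity s  = new ODBStringValue("zzz");   // typeCode 0x01

    bool a = i5 < i7;                   // true: same type code, compare() yields 5 - 7 < 0
    bool b = s  < i5;                   // true: 0x01 < 0x10, regardless of the values
    bool c = i5 == new ODBInteger(5);   // true: CompareTo() == 0 for equal same-type values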
+// * +// * +// **/ +using System; +using System.Text; +namespace ln.objects.catalog +{ + /// + /// The base type of all immutable ODB types + /// + public abstract class ODBValue : ODBEntity + { + public object Value { get; protected set; } + + public override ODBValue Identity => this; + public override ODBEntity Clone() => this; + + protected ODBValue(int storageTypeCode, object value) + : base(storageTypeCode) + { + Value = value; + } + + public override int GetHashCode() => Value.GetHashCode(); + public override bool Equals(object obj) + { + if (GetType().Equals(obj.GetType())) + { + if (obj is ODBValue) + return Equals(Value, (obj as ODBValue).Value); + if (obj is ODBEntity) + return Equals(Identity, (obj as ODBEntity).Identity); + } + return false; + } + + public override string ToString() + { + return string.Format("[{0} Value={1}]", GetType().Name, Value); + } + + public override string ToTreeString(int indent) + { + return string.Format("{0} Value={1}", GetType().Name, Value); + } + + + + + + //public virtual string AsString => As(); + //public virtual bool AsBool => As(); + //public virtual byte AsByte => As(); + //public virtual char AsChar => (char)Value; + //public virtual short AsShort => Convert.ToInt16(Value); + //public virtual int AsInt => Convert.ToInt32(Value); + //public virtual long AsLong => Convert.ToInt64(Value); + //public virtual ushort AsUShort => Convert.ToUInt16(Value); + //public virtual uint AsUInt => (uint)Value; + //public virtual ulong AsULong => (ulong)Value; + + //public virtual double AsDouble => (double)Value; + //public virtual float AsFloat => (float)Value; + + //public virtual Guid AsGuid => (Guid)Value; + //public virtual DateTime AsDateTime => (DateTime)Mapper.Default.UnmapValue(typeof(DateTime), this); + //public virtual TimeSpan AsTimeSpan => (TimeSpan)Mapper.Default.UnmapValue(typeof(TimeSpan), this); + } +} diff --git a/collections/LazyList.cs b/collections/LazyList.cs new file mode 100644 index 0000000..3b87992 --- /dev/null +++ b/collections/LazyList.cs @@ -0,0 +1,105 @@ +using System; +using System.Collections; +using System.Collections.Generic; + +namespace ln.objects.collections +{ + public class LazyList : IList where T:class + { + ObjectStore objectStore; + List listItems = new List(); + + public LazyList(ObjectStore objectStore) + :this(objectStore, new Guid[0]) { } + + public LazyList(ObjectStore objectStore,Guid[] initialItems) + { + this.objectStore = objectStore; + } + + public T this[int index] + { + get + { + ListItem listItem = listItems[index]; + if (Object.ReferenceEquals(null, listItem.Value)) + { + if (Guid.Empty.Equals(listItem.UID)) + return null; + + listItem.Value = objectStore.LoadObject(listItem.UID); + } + return listItem.Value; + } + set + { + ListItem listItem = listItems[index]; + listItem.Value = value; + listItem.UID = Guid.Empty; + } + } + + public int Count => listItems.Count; + public bool IsReadOnly => false; + + public void Add(T item)=> listItems.Add(new ListItem() { Value = item }); + public void Clear() => listItems.Clear(); + public bool Contains(T item) + { + Guid uid = objectStore.GetObjectUID(item); + foreach (ListItem listItem in listItems) + if (listItem.UID.Equals(uid) || Object.ReferenceEquals(item, listItem.Value)) + return true; + return false; + } + + public void CopyTo(T[] array, int arrayIndex) + { + for (int i = 0; i < Count; i++) + array[i + arrayIndex] = this[i]; + } + + IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); + public IEnumerator GetEnumerator() + { + for (int i = 0; 
i < Count; i++) + yield return this[i]; + } + public int IndexOf(T item) + { + Guid uid = objectStore.GetObjectUID(item); + for (int i = 0; i < Count; i++) + { + ListItem listItem = listItems[i]; + if (listItem.UID.Equals(uid) || Object.ReferenceEquals(item, listItem.Value)) + return i; + } + return -1; + } + + public void Insert(int index, T item) => listItems.Insert(index, new ListItem() { Value = item }); + public bool Remove(T item) + { + Guid uid = objectStore.GetObjectUID(item); + for (int i = 0; i < Count; i++) + { + ListItem listItem = listItems[i]; + if (listItem.UID.Equals(uid) || Object.ReferenceEquals(item, listItem.Value)) + { + listItems.RemoveAt(i); + return true; + } + } + return false; + } + + public void RemoveAt(int index) => listItems.RemoveAt(index); + + class ListItem + { + public Guid UID; // Identifier of reference in objectStore + public T Value; // Loaded Reference + } + + } +} diff --git a/index/Index.cs b/index/Index.cs new file mode 100644 index 0000000..3a84fab --- /dev/null +++ b/index/Index.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace ln.objects.index +{ + public abstract class Index + { + + public Index() + { + } + + public abstract void Reindex(Guid uid, object value); + public abstract void Remove(Guid uid); + + public abstract void Match(Func criterion, ISet matches); + + public abstract void Clear(); // Reset this Index as it was newly created + + public abstract bool TrySerializeIndex(out byte[] serializedIndex); // Serialize current state of index to + public abstract bool TryDeserializeIndex(byte[] serializedIndex); // Deserialize current state from + + } +} diff --git a/index/IndexLeaf.cs b/index/IndexLeaf.cs new file mode 100644 index 0000000..1214e39 --- /dev/null +++ b/index/IndexLeaf.cs @@ -0,0 +1,55 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace ln.objects.index +{ + public class IndexLeaf + { + public Type LeafType { get; set; } + public string LeafName { get; set; } + + public Index Index { get; set; } + + Dictionary leafs = new Dictionary(); + Func leafGetter; + + public IndexLeaf() + { + } + public IndexLeaf(Type valueType, string leafName, Func getter) + { + LeafType = valueType; + LeafName = leafName; + leafGetter = getter; + } + + public IndexLeaf GetLeaf(string path) => leafs[path]; + + public void AddLeaf(IndexLeaf indexLeaf) => leafs.Add(indexLeaf.LeafName, indexLeaf); + public void RemoveLeaf(string leafName) => leafs.Remove(leafName); + + public void Reindex(Guid uid, object value) + { + Index?.Reindex(uid, value); + foreach (IndexLeaf indexLeaf in leafs.Values) + indexLeaf.Reindex(uid, indexLeaf.leafGetter(value)); + } + public void Remove(Guid uid) + { + Index?.Remove(uid); + foreach (IndexLeaf indexLeaf in leafs.Values) + indexLeaf.Remove(uid); + } + + public void Clear() + { + Index?.Clear(); + foreach (IndexLeaf indexLeaf in leafs.Values) + indexLeaf.Clear(); + } + + public void Match(Func criterion, ISet matches) => Index?.Match(criterion, matches); + + } +} diff --git a/index/SimpleIndex.cs b/index/SimpleIndex.cs new file mode 100644 index 0000000..8aa3a10 --- /dev/null +++ b/index/SimpleIndex.cs @@ -0,0 +1,57 @@ +using ln.collections; +using System; +using System.Collections.Generic; +using System.Text; + +namespace ln.objects.index +{ + class SimpleIndex : Index + { + BTreeValueSet index = new BTreeValueSet(); + BTree reverseIndex = new BTree(); + + public SimpleIndex() + { + } + + public override void Clear() + { + 
index.Clear();
+            reverseIndex.Clear();
+        }
+
+        public override void Match(Func<object, bool> criterion, ISet<Guid> matches)
+        {
+            foreach (T ivalue in index.Keys)
+            {
+                if (criterion(ivalue))
+                    matches.UnionWith(index[ivalue]);
+            }
+        }
+
+        public override void Reindex(Guid uid, object value)
+        {
+            Remove(uid);
+            index.Add((T)value, uid);
+            reverseIndex.Add(uid, (T)value);    // keep the reverse map in step so Remove() can locate this entry
+        }
+
+        public override void Remove(Guid uid)
+        {
+            if (reverseIndex.ContainsKey(uid))
+            {
+                index.TryRemove(reverseIndex[uid], uid);
+                reverseIndex.TryRemove(uid);
+            }
+        }
+
+        public override bool TryDeserializeIndex(byte[] serializedIndex)
+        {
+            throw new NotImplementedException();
+        }
+
+        public override bool TrySerializeIndex(out byte[] serializedIndex)
+        {
+            throw new NotImplementedException();
+        }
+    }
+}
diff --git a/ln.objects.csproj b/ln.objects.csproj
new file mode 100644
index 0000000..c2ac737
--- /dev/null
+++ b/ln.objects.csproj
@@ -0,0 +1,20 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>netcoreapp3.1</TargetFramework>
+  </PropertyGroup>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+</Project>
diff --git a/ng/Document.cs b/ng/Document.cs
new file mode 100644
index 0000000..118d293
--- /dev/null
+++ b/ng/Document.cs
@@ -0,0 +1,189 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using ln.objects.catalog;
+
+namespace ln.types.odb.ng
+{
+    public class Document : ODBEntity
+    {
+        public override ODBValue Identity => new ODBGuid(ID);
+
+        private Dictionary<ODBEntity, ODBEntity> properties = new Dictionary<ODBEntity, ODBEntity>();
+
+        public Document()
+            : base(0x1001)
+        {
+            ID = Guid.NewGuid();
+        }
+
+        public Document(Guid id)
+            : base(0x1001)
+        {
+            ID = id;
+        }
+
+        public Guid ID { get; }
+        public DateTime StorageTimeStamp { get; set; }
+
+        public Document(byte[] bytes)
+            : this(bytes, 0, bytes.Length)
+        {}
+        public Document(byte[] bytes, int offset, int length)
+            : this(new Guid(bytes.Slice(offset, 16)))
+        {
+            int endOffset = offset + length;
+
+            offset += 16;   // skip the GUID, it was already consumed by the this(...) chained constructor
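+            // Wire layout of a serialized Document:
+            //   [16-byte GUID][int32 property count][count x (name entity, value entity)]
+            // where each entity is framed as [int32 typeCode][int32 length][payload].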
+ + int nProps = BitConverter.ToInt32(bytes, offset); + offset += 4; + + for (int n=0;n endOffset) + throw new FormatException("Document deserialization read behind end of buffer"); + } + + public ODBEntity this[ODBEntity propName] + { + get + { + if (properties.ContainsKey(propName)) + return properties[propName]; + return ODBNull.Instance; + } + set + { + if (ODBNull.Instance.Equals(value)) + { + if (properties.ContainsKey(propName)) + properties.Remove(propName); + } + else + { + properties[propName] = value; + } + } + } + public ODBEntity this[string propName] + { + get => this[new ODBStringValue(propName)]; + set => this[new ODBStringValue(propName)] = value; + } + + public IEnumerable Keys => properties.Keys; + + public bool Contains(string propName) => Contains(new ODBStringValue(propName)); + public bool Contains(ODBEntity propName) + { + return !ODBNull.Instance.Equals(this[propName]); + } + + public override ODBEntity Clone() + { + Document clone = new Document(ID); + clone.StorageTimeStamp = StorageTimeStamp; + + foreach (ODBEntity fieldName in properties.Keys) + { + clone[fieldName] = this[fieldName].Clone(); + } + return clone; + } + + public void CloneTo(Document target) + { + target.properties.Clear(); + target.StorageTimeStamp = StorageTimeStamp; + foreach (ODBEntity fieldName in properties.Keys) + { + target[fieldName] = this[fieldName].Clone(); + } + } + + public override byte[] GetStorageBytes() + { + MemoryStream stream = new MemoryStream(); + BinaryWriter writer = new BinaryWriter(stream); + + writer.Write(ID.ToByteArray()); + writer.Write(properties.Count); + + foreach (ODBEntity propName in properties.Keys) + { + ODBEntity propValue = properties[propName]; + propName.Write(writer); + propValue.Write(writer); + } + + return stream.ToArray(); + } + + public override string ToString() + { + return String.Format("[Document ID={0} {1}]", ID.ToString(),String.Join(" ",properties.Select(kv=> String.Format("{0}={1}",kv.Key,kv.Value)))); + } + public override string ToTreeString(int indent) + { + indent += 2; + StringBuilder stringBuilder = new StringBuilder(); + + stringBuilder.AppendFormat("{0} Identity={1} Count={2}", GetType().Name, Identity, properties.Count); + foreach (ODBValue key in properties.Keys) + { + stringBuilder.AppendLine(); + stringBuilder.AppendFormat("{0}{1,-32}: {2}", new String(' ', indent), key, properties[key].ToTreeString(indent)); + } + + return stringBuilder.ToString(); + } + + + public override int GetHashCode() + { + return ID.GetHashCode(); + } + public override bool Equals(object obj) + { + if (Equals(GetType(), obj.GetType()) && (obj is Document)) + { + Document you = obj as Document; + return ID.Equals(you.ID); + } + return false; + } + + protected override int compare(ODBEntity e) + { + Document other = e as Document; + ODBEntity[] keys = Keys.Union(other.Keys).ToArray(); + + foreach (ODBEntity key in keys) + { + ODBEntity mine = this[key]; + ODBEntity yours = other[key]; + + int c = mine.CompareTo(yours); + + if (c != 0) + return c; + } + return 0; + } + + public override T As() => (T)Mapper.Default.UnmapValue(typeof(T), this); + + static Document() + { + RegisterDeserializer(0x1001, (b,o,l) => new Document(b,o,l)); + } + } +} diff --git a/ng/IODBMapping.cs b/ng/IODBMapping.cs new file mode 100644 index 0000000..1121d27 --- /dev/null +++ b/ng/IODBMapping.cs @@ -0,0 +1,10 @@ +using System; +using ln.objects.catalog; +namespace ln.types.odb.ng +{ + public interface IODBMapping + { + ODBEntity MapValue(Mapper mapper, object value); + object 
UnmapValue(Mapper mapper, ODBEntity oval);
+    }
+}
diff --git a/ng/IdentityCache.cs b/ng/IdentityCache.cs
new file mode 100644
index 0000000..496b1ff
--- /dev/null
+++ b/ng/IdentityCache.cs
@@ -0,0 +1,56 @@
+using System;
+using ln.types.collections;
+namespace ln.types.odb.ng
+{
+    public class IdentityCache
+    {
+        WeakKeyReferenceDictionary<object, Guid> reverseCache = new WeakKeyReferenceDictionary<object, Guid>();
+        WeakValueDictionary<Guid, object> forwardCache = new WeakValueDictionary<Guid, object>();
+
+        public IdentityCache()
+        {
+        }
+
+        public bool TryGetValue(Guid identity, out object o)
+        {
+            lock (this)
+            {
+                return forwardCache.TryGetValue(identity, out o);
+            }
+        }
+
+        public bool TryGetValue(Guid identity, out object o, Func<object> instantiator)
+        {
+            lock (this)
+            {
+                if (!TryGetValue(identity, out o))
+                {
+                    o = instantiator();
+                    forwardCache.Add(identity, o);
+                    reverseCache.Add(o, identity);
+                }
+                return true;    // a value is always available here, either cached or freshly instantiated
+            }
+        }
+
+        public bool TryGetIdentity(object o, out Guid identity)
+        {
+            lock (this)
+            {
+                return reverseCache.TryGetValue(o, out identity);
+            }
+        }
+
+        public void Ensure(Guid identity, object o)
+        {
+            lock (this)
+            {
+                if (!forwardCache.ContainsKey(identity))
+                    forwardCache.Add(identity, o);
+                if (!reverseCache.ContainsKey(o))
+                    reverseCache.Add(o, identity);
+            }
+        }
+
+    }
+}
diff --git a/ng/Mapper.API.cs b/ng/Mapper.API.cs
new file mode 100644
index 0000000..40f1735
--- /dev/null
+++ b/ng/Mapper.API.cs
@@ -0,0 +1,220 @@
+using ln.types.btree;
+using System.Collections.Generic;
+using System;
+using ln.types.odb.ng.index;
+using System.Linq;
+using System.Collections;
+using ln.types.odb.ng.storage;
+using ln.types.odb.ng.mappings;
+using ln.objects.catalog;
+
+namespace ln.types.odb.ng
+{
+
+    public partial class Mapper
+    {
+        BTree<Guid, CachedObject> forwardCache = new BTree<Guid, CachedObject>();
+        Dictionary<object, CachedObject> reverseCache = new Dictionary<object, CachedObject>();
+
+        public IEnumerable<T> Load<T>() => Load(typeof(T), false).Cast<T>();
+        public IEnumerable<T> Load<T>(bool refresh) => Load(typeof(T), refresh).Cast<T>();
+        public IEnumerable Load(Type type) => Load(type, false);
+        public IEnumerable Load(Type type, bool refresh)
+        {
+            return new MappedObjectEnumeration(this, type, GetDocumentIDs(type), refresh);
+        }
+        public object[] LoadArray(Type type) => LoadArray(type, false);
+        public object[] LoadArray(Type type, bool refresh)
+        {
+            MappedObjectEnumeration mappedObjectEnumeration = new MappedObjectEnumeration(this, type, GetDocumentIDs(type), refresh);
+            object[] objects = new object[mappedObjectEnumeration.Count];
+            int n = 0;
+            foreach (object o in mappedObjectEnumeration)
+                objects[n++] = o;
+            return objects;
+        }
+        public T Load<T>(Guid documentID) => (T)Load(typeof(T), documentID, false);
+        public T Load<T>(Guid documentID, bool refresh) => (T)Load(typeof(T), documentID, refresh);
+        public object Load(Type type, Guid documentID) => Load(type, documentID, false);
+        public object Load(Type type, Guid documentID, bool refresh)
+        {
+            lock (this)
+            {
+                if (forwardCache.ContainsKey(documentID))
+                {
+                    CachedObject cachedObject = forwardCache[documentID];
+                    if (refresh)
+                        Refresh(type, cachedObject.Instance);
+                    return cachedObject.Instance;
+                }
+                else
+                {
+                    IStorage storage = StorageContainer.GetStorage(type.FullName);
+                    Document document = storage.Load(documentID);
+
+                    object instance = ObjectMapping.UnmapValue(this, document);
+
+                    CachedObject cachedObject = new CachedObject(document, instance);
+                    forwardCache.Add(cachedObject.Document.ID, cachedObject);
+                    reverseCache.Add(cachedObject.Instance, cachedObject);
+
+                    return cachedObject.Instance;
+                }
+            }
+        }
+        public IEnumerable<T> Load<T>(Query query) => Load(typeof(T),
query, false).Cast(); + public IEnumerable Load(Query query,bool refresh) => Load(typeof(T), query, refresh).Cast(); + public IEnumerable Load(Type type, Query query) => Load(type, query, false); + public IEnumerable Load(Type type,Query query,bool refresh) + { + IEnumerable matchedIDs = GetDocumentIDs(type,query); + + return new MappedObjectEnumeration(this, type, matchedIDs,refresh); + } + + public bool Refresh(T instance) => Refresh(typeof(T), instance); + public bool Refresh(Type type,object instance) + { + if (!reverseCache.TryGetValue(instance, out CachedObject cachedObject)) + return false; + + IStorage storage = StorageContainer.GetStorage(type.FullName); + if (storage.Refresh(cachedObject.Document)) + { + (GetMapping(type) as ClassMapping).Apply(this,cachedObject.Document,cachedObject.Instance); + return true; + } + return false; + } + + + public void Save(T instance) => Save(typeof(T), instance); + public void Save(Type type, object instance) + { + lock (this) + { + IStorage storage = GetStorage(type); + CachedObject cachedObject; + Document document; + + if (reverseCache.ContainsKey(instance)) + { + cachedObject = reverseCache[instance]; + document = (GetMapping(type) as mappings.ClassMapping).MapDocument(this, cachedObject.Document.ID, instance); + + storage.Save(document); + + cachedObject.Document = document; + } + else + { + document = (GetMapping(type) as mappings.ClassMapping).MapDocument(this, Guid.NewGuid(), instance) as Document; + cachedObject = new CachedObject(document, instance); + + storage.Save(document); + + forwardCache.Add(cachedObject.Document.ID, cachedObject); + reverseCache.Add(instance, cachedObject); + } + } + } + + public void Delete(T instance) => Delete(typeof(T),instance); + public void Delete(Type type,object instance) + { + lock (this) + { + if (reverseCache.ContainsKey(instance)) + { + CachedObject cachedObject = reverseCache[instance]; + reverseCache.Remove(instance); + forwardCache.Remove(cachedObject.Document.ID); + + GetStorage(type).Delete(cachedObject.Document.ID); + } + } + } + public void Delete(Guid documentID) => Delete(typeof(T), documentID); + public void Delete(Type type, Guid documentID) + { + lock (this) + { + if (forwardCache.ContainsKey(documentID)) + { + CachedObject cachedObject = forwardCache[documentID]; + reverseCache.Remove(cachedObject.Instance); + forwardCache.Remove(cachedObject.Document.ID); + } + + GetStorage(type).Delete(documentID); + } + } + + public IEnumerable GetDocumentIDs() => GetDocumentIDs(typeof(T)); + public IEnumerable GetDocumentIDs(Type type) + { + IStorage storage = StorageContainer.GetStorage(type.FullName); + return storage.GetDocumentIDs(); + } + + public IEnumerable GetDocumentIDs(string path, Predicate predicate) => GetDocumentIDs(typeof(T), path, predicate); + public IEnumerable GetDocumentIDs(Type type, string path, Predicate predicate) + { + IStorage storage = StorageContainer.GetStorage(type.FullName); + return storage.GetDocumentIDs(path,predicate); + } + + public IEnumerable GetDocumentIDs(Query query) => GetDocumentIDs(typeof(T), query); + public IEnumerable GetDocumentIDs(Type type,Query query) + { + IStorage storage = StorageContainer.GetStorage(type.FullName); + return query.Execute(storage); + } + + public void EnsureIndex(string path) => EnsureIndex(typeof(T), path); + public void EnsureIndex(Type type,string path) + { + path = IndexPath.TranslatePropertyPath(type, path); + IStorage storage = StorageContainer.GetStorage(type.FullName); + storage.EnsureIndex(path); + } + + struct 
CachedObject + { + public object Instance; + public Document Document; + + public CachedObject(Document document, object instance) + { + Document = document; + Instance = instance; + } + } + + class MappedObjectEnumeration : IEnumerable + { + Mapper mapper; + Type type; + IEnumerable documentIDs; + bool refresh; + + public int Count => documentIDs.Count(); + + public MappedObjectEnumeration(Mapper mapper,Type type,IEnumerable documentIDs,bool refresh) + { + this.mapper = mapper; + this.type = type; + this.documentIDs = documentIDs; + this.refresh = refresh; + } + + public IEnumerator GetEnumerator() + { + foreach (Guid documentID in documentIDs) + yield return mapper.Load(type, documentID, refresh); + } + } + + } + +} \ No newline at end of file diff --git a/ng/Mapper.cs b/ng/Mapper.cs new file mode 100644 index 0000000..3c6a5f8 --- /dev/null +++ b/ng/Mapper.cs @@ -0,0 +1,302 @@ +using System; +using System.Collections.Generic; +using System.Reflection; +using System.Linq; +using System.Collections; +using ln.types.net; +using ln.types.odb.ng.storage; +using ln.collections; +using ln.types.odb.ng.mappings; +using ln.types.odb.ng.storage.fs; +using ln.objects.catalog; + +namespace ln.types.odb.ng +{ + public delegate ODBEntity ODBMap(Mapper mapper, object value); + public delegate object ODBUnmap(Mapper mapper, ODBEntity oval); + public delegate ODBEntity ODBMap(Mapper mapper, T value); + public delegate T ODBUnmap(Mapper mapper, ODBEntity oval); + + public partial class Mapper : IDisposable + { + public static Mapper Default { get; set; } = new Mapper((IStorageContainer)null); + + public IStorageContainer StorageContainer { get; private set; } + public IdentityCache IdentityCache { get; } = new IdentityCache(); + + Dictionary mappings = new Dictionary(); + + mappings.ObjectMapping ObjectMapping { get; } + + public Mapper(string basePath) + :this(new FSStorageContainer(basePath)) + { + StorageContainer.Open(); + } + public Mapper(IStorageContainer storageContainer) + { + if (Default?.StorageContainer == null) + Default = this; + + this.StorageContainer = storageContainer; + + RegisterMapping( + (mapper, value) => new ODBStringValue(value), + (mapper, oval) => oval.As() + ); + RegisterMapping( + (mapper, value) => new ODBInteger(value), + (mapper, oval) => oval.As() + ); + RegisterMapping( + (mapper, value) => new ODBInteger(value), + (mapper, oval) => oval.As() + ); + RegisterMapping( + (mapper, value) => new ODBInteger(value), + (mapper, oval) => oval.As() + ); + + RegisterMapping( + (mapper, value) => new ODBUInteger(value), + (mapper, oval) => oval.As() + ); + RegisterMapping( + (mapper, value) => new ODBUInteger(value), + (mapper, oval) => oval.As() + ); + RegisterMapping( + (mapper, value) => new ODBUInteger(value), + (mapper, oval) => oval.As() + ); + + RegisterMapping( + (mapper, value) => new ODBDouble(value), + (mapper, oval) => oval.As() + ); + RegisterMapping( + (mapper, value) => new ODBDouble(value), + (mapper, oval) => oval.As() + ); + + RegisterMapping( + (mapper, value) => new ODBLong(DateTime.MinValue.Equals(value) ? 
0 : new DateTimeOffset(value.ToUniversalTime()).ToUnixTimeMilliseconds() ), + (mapper, oval) => DateTimeOffset.FromUnixTimeMilliseconds(oval.As()).DateTime + ); + RegisterMapping( + (mapper, value) => new ODBDouble(value.TotalMilliseconds), + (mapper, oval) => TimeSpan.FromMilliseconds(oval.As()) + ); + + RegisterMapping( + (mapper, value) => new ODBGuid(value), + (mapper, oval) => oval.As() + ); + + RegisterMapping( + (mapper, value) => new ODBLong(value), + (mapper, oval) => oval.As() + ); + RegisterMapping( + (mapper, value) => new ODBULong(value), + (mapper, oval) => oval.As() + ); + + RegisterMapping( + (mapper, value) => (bool)value ? ODBBool.True : ODBBool.False, + (mapper, oval) => oval.As() + ); + + RegisterMapping( + (mapper, value) => new ODBUInteger(value.AsUInt), + (mapper, oval) => new IPv4(oval.As()) + ); + + RegisterMapping( + (mapper, value) => new ODBByteBuffer(value.ToCIDRBytes()), + (mapper, oval) => new IPv6((oval.As())) + ); + + ObjectMapping = new mappings.ObjectMapping(); + RegisterMapping(typeof(object),ObjectMapping); + } + + public void RegisterMapping(Type nativeType, IODBMapping mapping) + { + lock (mappings) + { + mappings[nativeType] = mapping; + } + } + public void RegisterMapping(Type nativeType, ODBMap map, ODBUnmap unmap) + { + lock (mappings) + { + mappings[nativeType] = new mappings.SimpleMapping(map, unmap); + } + } + public void RegisterMapping(ODBMap map, ODBUnmap unmap) + { + lock (mappings) + { + mappings[typeof(T)] = new mappings.SimpleMapping( + (mapper, value) => map(mapper, (T)value), + (mapper, value) => unmap(mapper, value) + ); + } + } + + public IStorage GetStorage(Type type) => GetStorage(type, type.FullName); + public IStorage GetStorage(Type type,string typeName) + { + IStorage storage = StorageContainer.GetStorage(typeName); + if (!storage.IsOpen) + storage.Open(); + return storage; + } + + public IODBMapping GetMapping() => GetMapping(typeof(T)); + public IODBMapping GetMapping(Type type) + { + lock (this.mappings) + { + if (type == null) + throw new ArgumentNullException(); + + if (mappings.ContainsKey(type)) + return mappings[type]; + + if (typeof(string).Equals(type)) + { + return null; + } + else if (type.IsGenericType && (type.GetGenericTypeDefinition().Equals(typeof(Dictionary<,>)))) + { + mappings.Add(type, new mappings.DictionaryMapping()); + return mappings[type]; + } + else if (type.IsGenericType && (type.GetGenericTypeDefinition().Equals(typeof(List<>)) || type.GetGenericTypeDefinition().Equals(typeof(HashSet<>)))) + { + mappings.Add(type, new mappings.ListMapping(type)); + return mappings[type]; + } + else if (type.GetInterfaces().Contains(typeof(IDictionary))) + { + mappings.Add(type, new mappings.DictionaryMapping()); + return mappings[type]; + } + else if (type.IsArray) + { + mappings.Add(type, new mappings.ListMapping(type)); + return mappings[type]; + } + else if (type.IsEnum) + { + mappings.Add(type, new SimpleMapping( + (mapper, value) => new ODBStringValue(Enum.GetName(type, value)), + (mapper, oval) => Enum.Parse(type, (oval as ODBStringValue).Value as String) + )); + return mappings[type]; + } + else if (!type.IsPrimitive) + { + mappings.Add(type, new mappings.ClassMapping(type)); + return mappings[type]; + } + + return null; + } + } + + + + + //public object GetDocumentID(object o) + //{ + // IODBMapping mapping = GetMapping(o.GetType()); + // if (mapping is mappings.ClassMapping) + // { + // mappings.ClassMapping classMapping = mapping as mappings.ClassMapping; + // return classMapping.getID(o); + // } + // 
return null; + //} + //public Type GetDocumentIDType(Type type) + //{ + // IODBMapping mapping = GetMapping(type); + // if (mapping is mappings.ClassMapping) + // { + // mappings.ClassMapping classMapping = mapping as mappings.ClassMapping; + // return classMapping.IDType; + // } + // return null; + //} + + public virtual ODBEntity MapValue(object value) + { + if (value == null) + return ODBNull.Instance; + + IODBMapping mapping = GetMapping(value.GetType()); + if (mapping != null) + return mapping.MapValue(this,value); + + throw new NotSupportedException(String.Format("Can't map {0} ({1})",value.GetType(),value)); + } + public virtual object UnmapValue(Type targetType,ODBEntity value) + { + if (ODBNull.Instance.Equals(value)) + return null; + + if (value is Document) + { + Document doc = value as Document; + String asmname = doc["__asm__"].As(); + String typename = doc["__type__"].As(); + + if (typename != null) + targetType = Type.GetType(String.Format("{0}, {1}",typename,asmname)); //Assembly.Load(asmname).GetType(typename); + } + //else if (value is ODBTypedValue) + //{ + // ODBTypedValue typedValue = value as ODBTypedValue; + // targetType = typedValue.TargetType; + //} + + IODBMapping mapping = GetMapping(targetType); + if (mapping != null) + return mapping.UnmapValue(this,value); + + return value.As(targetType); + } + + public virtual object TryImplicitMapping(object value,Type targetType) + { + Type sourceType = value.GetType(); + + foreach (MethodInfo mop in sourceType.GetMethods(BindingFlags.Static | BindingFlags.Public)) + { + if (mop.Name.Equals("op_Implicit") && + (mop.ReturnType.Equals(targetType))) + if( + (sourceType.Equals(mop.GetParameters().FirstOrDefault().ParameterType)) + ) + { + return mop.Invoke(null, new object[] { value }); + } + } + + return value; + } + + public void Dispose() + {} + + public static String GetTypeName(Type type) + { + if (type == null) + return null; + return string.Format("{0}, {1}",type.FullName,type.Assembly.GetName().Name); + } + } +} diff --git a/ng/ObjectCollection.cs b/ng/ObjectCollection.cs new file mode 100644 index 0000000..b48b351 --- /dev/null +++ b/ng/ObjectCollection.cs @@ -0,0 +1,307 @@ +using System; +using System.Collections; +using ln.types.odb.values; +using System.Collections.Generic; +using System.Linq; +namespace ln.types.odb.ng +{ + //public class ObjectCollection : IEnumerable + //{ + // public ODBMapper ODBMapper { get; } + // public ODBCollection DocumentCollection { get; private set; } + + // public Type ElementType { get; } + // public String CollectionName => DocumentCollection.CollectionName; + + // public int Count => DocumentCollection.Count; + + // public Type IDType => ODBMapper.GetDocumentIDType(ElementType); + + // internal ObjectCollection(ODBMapper odbmapper, Type elementType) + // : this(odbmapper, elementType, elementType.FullName) + // { } + // internal ObjectCollection(ODBMapper odbmapper, Type elementType, String collectionAlias) + // { + // ODBMapper = odbmapper; + // ElementType = elementType; + // DocumentCollection = ODBMapper.StorageContainer.GetCollection(elementType.FullName); + // } + + // public object SelectByID(object ID) + // { + // ODBValue documentID = ODBMapper.MapValue(ID); + // return SelectByID(documentID); + // } + // public object SelectByID(ODBValue documentID) + // { + // if (ODBNull.Instance.Equals(documentID)) + // return null; + // lock (this) + // { + // object o = GetCachedObject(documentID); + // if (object.ReferenceEquals(null, o)) + // { + // ODBDocument document = 
DocumentCollection.GetDocumentByID(documentID); + // o = ODBMapper.UnmapValue(ElementType, document); + // TouchCache(documentID, o); + // } + // return o; + // } + // } + + // public IEnumerable Select(Query query) + // { + // lock (this) + // { + // return new ObjectEnumeration(this, query.Execute(DocumentCollection).ToArray()); + // } + // } + + + // public bool Ensure(object o) + // { + // if (!ElementType.IsInstanceOfType(o)) + // throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o)); + + // lock (this) + // { + // ODBDocument document = ODBMapper.MapValue(o) as ODBDocument; + // if (DocumentCollection.Ensure(document)) + // { + // TouchCache(document.ID, o); + // return true; + // } + // return false; + // } + // } + + // public bool Insert(object o) + // { + // lock (this) + // { + // if (!ElementType.IsInstanceOfType(o)) + // throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o)); + + // ODBDocument document = ODBMapper.MapValue(o) as ODBDocument; + // if (DocumentCollection.Insert(document)) + // { + // TouchCache(document.ID, o); + // return true; + // } + // return false; + // } + // } + // public bool Update(object o) + // { + // lock (this) + // { + // lock (this) + // { + // if (!ElementType.IsInstanceOfType(o)) + // throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o)); + + // ODBDocument document = ODBMapper.MapValue(o) as ODBDocument; + // if (DocumentCollection.Update(document)) + // { + // TouchCache(document.ID, o); + // return true; + // } + // return false; + // } + // } + // } + // public bool Upsert(object o) + // { + // if (!ElementType.IsInstanceOfType(o)) + // throw new ArgumentException(String.Format("Object needs to be of type {0}", ElementType.FullName), nameof(o)); + + // lock (this) + // { + // ODBDocument document = ODBMapper.MapValue(o) as ODBDocument; + // if (DocumentCollection.Upsert(document)) + // { + // TouchCache(document.ID, o); + // return true; + // } + // return false; + // } + // } + + // public bool Delete(object o) => Delete(ODBMapper.MapValue(ODBMapper.GetDocumentID(o))); + // public bool Delete(ODBValue documentID) + // { + // lock (this) + // { + // if (DocumentCollection.Delete(documentID)) + // { + // if (objectCache.ContainsKey(documentID)) + // objectCache.Remove(documentID); + // return true; + // } + // return false; + // } + // } + + + // public bool HasProperty(string propName) + // { + // propName = IndexPath.TranslatePropertyPath(ElementType, propName); + + // ClassMapping classMapping = ODBMapper.GetMapping(ElementType) as ClassMapping; + // if (classMapping != null) + // { + // return classMapping.HasField(propName); + // } + // return false; + // } + + + + // /* Indeces */ + // public void EnsureIndex(string propertyPath, bool unique = false) + // { + // //string translatedPath = IndexPath.TranslatePropertyPath(ElementType, propertyPath); + // //DocumentCollection.EnsureIndex(translatedPath, translatedPath); + // EnsureIndeces(false, new string[] { propertyPath }); + // } + // public void EnsureIndeces(params string[] propertyPaths) => EnsureIndeces(false, propertyPaths); + // public void EnsureIndeces(bool unique, params string[] propertyPaths) + // { + // for (int n = 0; n < propertyPaths.Length; n++) + // propertyPaths[n] = IndexPath.TranslatePropertyPath(ElementType, propertyPaths[n]); + + // DocumentCollection.EnsureIndeces(propertyPaths, false); + // } + + 
// public void EnsureUniqueness(params string[] propertyPaths) + // { + // for (int n = 0; n < propertyPaths.Length; n++) + // propertyPaths[n] = IndexPath.TranslatePropertyPath(ElementType, propertyPaths[n]); + + // DocumentCollection.EnsureUniqueness(propertyPaths); + // } + + // /* Object Cache */ + // public bool UseStrongCache { get; private set; } + // Dictionary objectCache = new Dictionary(); + + // public void EnableStrongCache(bool enable) + // { + // lock (this) + // { + // if (!enable) + // { + // foreach (ODBValue key in objectCache.Keys.ToArray()) + // { + // if (!(objectCache[key] is WeakReference)) + // objectCache.Remove(key); + // } + // } + // else + // { + // foreach (ODBValue key in objectCache.Keys.ToArray()) + // { + // if ((objectCache[key] is WeakReference)) + // objectCache[key] = (objectCache[key] as WeakReference).Target; + // } + // } + // } + // } + + // private object GetCachedObject(ODBValue documentID) + // { + // if (objectCache.ContainsKey(documentID)) + // { + // object o = objectCache[documentID]; + // if (o is WeakReference) + // { + // WeakReference weak = o as WeakReference; + // if (weak.IsAlive) + // return weak.Target; + // else + // return null; + // } + // return o; + // } + // return null; + // } + + // private void TouchCache(ODBValue documentID, object o) + // { + // if (object.ReferenceEquals(o, null) && objectCache.ContainsKey(documentID)) + // { + // objectCache.Remove(documentID); + // } + // else if (!object.ReferenceEquals(o, null)) + // { + // if (UseStrongCache) + // objectCache[documentID] = o; + // else + // objectCache[documentID] = new WeakReference(o); + // } + // } + + // public object[] GetDocumentIDs() + // { + // return DocumentCollection.Index.Select((arg) => ODBMapper.UnmapValue(IDType, arg)).ToArray(); + // } + + // public IEnumerable GetEnumeration() + // { + // lock (this) + // { + // return new ObjectEnumeration(this, DocumentCollection.Index.ToArray()); + // } + // } + + // public IEnumerator GetEnumerator() + // { + // return GetEnumeration().GetEnumerator(); + // } + + // public void Close() + // { + // DocumentCollection = null; + // } + + + // class ObjectEnumeration : IEnumerable + // { + // ObjectCollection collection; + // IEnumerable documentIDs; + + // public ObjectEnumeration(ObjectCollection collection,IEnumerable documentIDs) + // { + // this.collection = collection; + // this.documentIDs = documentIDs; + // } + + // public IEnumerator GetEnumerator() + // { + // foreach (ODBValue documentID in this.documentIDs) + // { + // yield return this.collection.SelectByID(documentID); + // } + // } + // } + + //} + + + + //public class ObjectCollection : ObjectCollection where T:class + //{ + // public ObjectCollection(ODBMapper odbmapper) + // :base(odbmapper,typeof(T)) + // {} + + // public IEnumerable SelectQuery(Query query) => base.Select(query).Cast(); + // public T Select(object id) => (T)base.SelectByID(id); + + // public bool Ensure(T o) => base.Ensure(o); + // public bool Insert(T o) => base.Insert(o); + // public bool Update(T o) => base.Update(o); + // public bool Upsert(T o) => base.Upsert(o); + // public void Delete(T o) => base.Delete(o); + //} +} diff --git a/ng/Query.cs b/ng/Query.cs new file mode 100644 index 0000000..39d17ad --- /dev/null +++ b/ng/Query.cs @@ -0,0 +1,196 @@ +// /** +// * File: Query.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. 
+// * Any use wihtout proper permission is illegal and may lead to legal actions. +// * +// * +// **/ +using System; +using System.Collections.Generic; +using ln.types.btree; +using System.Linq; +using ln.types.odb.ng.index; +using ln.types.odb.ng.storage; +using ln.objects.catalog; + +namespace ln.types.odb.ng +{ + public abstract class Query + { + private Query() + { + } + + public static Query AND(Query query,params Query[] queries) + { + return new IMPL.AND(query, queries); + } + public static Query OR(Query query, params Query[] queries) + { + return new IMPL.OR(query, queries); + } + public static Query NOT(Query query) + { + return new IMPL.NOT(query); + } + + public static Query Equals(string propertyName, object value) => Equals(IndexPath.TranslatePropertyPath(typeof(T), propertyName), Mapper.Default.MapValue(value)); + public static Query Equals(String propertyName, ODBEntity value) + { + if (object.ReferenceEquals(value,null)) + value = ODBNull.Instance; + + return IF(propertyName, (v) => value.CompareTo(v)==0); + } + public static Query EqualsNot(string propertyName, ODBEntity value) => EqualsNot(IndexPath.TranslatePropertyPath(typeof(T), propertyName), value); + public static Query EqualsNot(String propertyName, ODBEntity value) + { + if (object.ReferenceEquals(value, null)) + value = ODBNull.Instance; + + return IF(propertyName, (v) => value.CompareTo(v)!=0); + } + + public static Query Equals(string propertyName, ODBEntity[] values) => Equals(IndexPath.TranslatePropertyPath(typeof(T), propertyName), values); + public static Query Equals(String propertyName, ODBEntity[] values) + { + return IF(propertyName, (v) => { + foreach (ODBEntity value in values) + if (value.CompareTo(v) == 0) + return true; + return false; + }); + } + + public static Query Contains(string propertyName, IEnumerable values) + { + ODBEntity[] oValues = values.Select(v => Mapper.Default.MapValue(v)).ToArray(); + return IF(IndexPath.TranslatePropertyPath(typeof(T), propertyName), v => oValues.Contains(v)); + } + + + public static Query IF(string propertyName, Predicate predicate) => IF(IndexPath.TranslatePropertyPath(typeof(T),propertyName), predicate); + public static Query IF(string propertyName,Predicate predicate) + { + return new IMPL.IF(propertyName, predicate); + } + + public abstract IEnumerable Execute(IStorage storage); + + + class IMPL + { + public class IF : Query + { + public string PropertyName { get; } + public Predicate Predicate { get; } + + public IF(string propertyName,Predicate predicate) + { + PropertyName = propertyName; + Predicate = predicate; + } + + public override IEnumerable Execute(IStorage storage) + { + return storage.GetDocumentIDs(PropertyName,Predicate); + } + } + + public class AND : Query + { + public Query Query { get; } + public Query[] Queries { get; } + + public AND(Query query, params Query[] queries) + { + Query = query; + Queries = queries; + } + + public override IEnumerable Execute(IStorage storage) + { + BTree firstSet = new BTree(); + + firstSet.AddRange( + Query.Execute(storage) + ); + + foreach (Query aQuery in Queries) + { + if (firstSet.Count == 0) + return new Guid[0]; + + BTree nextSet = new BTree(); + + foreach (Guid documentID in aQuery.Execute(storage)) + { + if (firstSet.ContainsKey(documentID)) + nextSet.Add(documentID); + } + + firstSet = nextSet; + } + + return firstSet.Keys; + } + } + + public class OR : Query + { + public Query Query { get; } + public Query[] Queries { get; } + + public OR(Query query, params Query[] queries) + { + Query = 
query; + Queries = queries; + } + + public override IEnumerable Execute(IStorage storage) + { + BTree result = new BTree(); + + result.TryAddRange( + Query.Execute(storage) + ); + + foreach (Query aQuery in Queries) + { + foreach (Guid documentID in aQuery.Execute(storage)) + result.TryAdd(documentID); + } + + return result.Keys; + } + } + public class NOT : Query + { + public Query Query { get; } + + public NOT(Query query) + { + Query = query; + } + + public override IEnumerable Execute(IStorage storage) + { + BTree notSet = new BTree(); + BTree result = new BTree(); + + notSet.AddRange(Query.Execute(storage)); + + foreach (Guid documentID in storage.GetDocumentIDs()) + { + if (!notSet.ContainsKey(documentID)) + result.Add(documentID); + } + + return result.Keys; + } + } + } + } +} diff --git a/ng/Reference.cs b/ng/Reference.cs new file mode 100644 index 0000000..8f89627 --- /dev/null +++ b/ng/Reference.cs @@ -0,0 +1,36 @@ +using System; +namespace ln.types.odb.ng +{ + //public class Reference where T:class + //{ + // ODBMapper mapper; + // ObjectCollection Collection => mapper.GetCollection(); + + // internal object valueID; + + // public Reference(ODBMapper mapper) + // { + // this.mapper = mapper; + // } + // public Reference(ODBMapper mapper,T value) + // { + // this.mapper = mapper; + // this.Value = value; + // } + + // public T Value + // { + // get + // { + // if (this.valueID == null) + // return null; + // return this.Collection.Select(valueID); + // } + // set + // { + // this.Collection.Ensure(value); + // this.valueID = this.mapper.GetDocumentID(value); + // } + // } + //} +} diff --git a/ng/diff/Diff.cs b/ng/diff/Diff.cs new file mode 100644 index 0000000..76666fc --- /dev/null +++ b/ng/diff/Diff.cs @@ -0,0 +1,57 @@ +using System; +using ln.objects.catalog; +namespace ln.types.odb.ng.diff +{ + public abstract class Diff + { + + public abstract ODBEntity Apply(ODBEntity src); + + public static Diff Construct(ODBEntity src,ODBEntity dst) + { + if (!src.GetType().Equals(dst.GetType())) + { + return new SimpleDiff(dst); + } + else if (src is Document) + { + return new DocumentDiff(src as Document, dst as Document); + } else if (src is ODBList) + { + return new ListDiff(src as ODBList, dst as ODBList); + } + + return new SimpleDiff(dst); + } + + public string TreeString => ToTreeString(0); + public abstract string ToTreeString(int indent); + + + class SimpleDiff : Diff + { + public ODBEntity DestinationValue { get; } + + public SimpleDiff(ODBEntity dst) + { + DestinationValue = dst; + } + + public override ODBEntity Apply(ODBEntity src) + { + return DestinationValue; + } + + public override string ToString() + { + return String.Format("[SimpleDiff DestinationValue={0}]",DestinationValue); + } + + public override string ToTreeString(int indent) + { + return String.Format("= {0}", DestinationValue.ToTreeString(indent+2)); + } + } + + } +} diff --git a/ng/diff/DocumentDiff.cs b/ng/diff/DocumentDiff.cs new file mode 100644 index 0000000..63dcb5d --- /dev/null +++ b/ng/diff/DocumentDiff.cs @@ -0,0 +1,60 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using ln.objects.catalog; + +namespace ln.types.odb.ng.diff +{ + public class DocumentDiff : Diff + { + Dictionary propertyDiffs = new Dictionary(); + + public DocumentDiff(Document src, Document dst) + { + HashSet keys = new HashSet(src.Keys); + foreach (ODBEntity key in dst.Keys) + keys.Add(key); + + foreach (ODBEntity key in keys) + { + if (src[key].CompareTo(dst[key]) != 0) + 
propertyDiffs.Add(key, Diff.Construct(src[key], dst[key])); + } + } + + public override ODBEntity Apply(ODBEntity src) + { + Document srcDocument = src as Document; + + foreach (ODBValue key in propertyDiffs.Keys) + { + srcDocument[key] = propertyDiffs[key].Apply(srcDocument[key]); + } + + return src; + } + + public override string ToString() + { + return String.Format("[DocumentDiff ChangedProperties=({0})]",string.Join(",",propertyDiffs.Keys)); + } + + public override string ToTreeString(int indent) + { + indent += 2; + StringBuilder stringBuilder = new StringBuilder(); + + stringBuilder.AppendFormat("DocumentDiff ChangedProperties=({0})",string.Join(",",propertyDiffs.Keys)); + foreach (ODBEntity key in propertyDiffs.Keys) + { + stringBuilder.AppendLine(); + stringBuilder.AppendFormat("{0}{1,-32} {2}", new string(' ', indent), key, propertyDiffs[key].ToTreeString(indent)); + } + + return stringBuilder.ToString(); + + } + + } +} diff --git a/ng/diff/ListDiff.cs b/ng/diff/ListDiff.cs new file mode 100644 index 0000000..5f979ee --- /dev/null +++ b/ng/diff/ListDiff.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using ln.objects.catalog; + +namespace ln.types.odb.ng.diff +{ + public class ListDiff : Diff + { + List remove = new List(); + List add = new List(); + Dictionary update = new Dictionary(); + + public ListDiff(ODBList src,ODBList dst) + { + HashSet srcItems = new HashSet(src); + HashSet common = new HashSet(srcItems); + HashSet dstItems = new HashSet(dst); + HashSet commonDst = new HashSet(dstItems); + + common.IntersectWith(dstItems); + commonDst.IntersectWith(srcItems); + + srcItems.ExceptWith(common); + dstItems.ExceptWith(common); + + remove.AddRange(srcItems.Select((i) => i.Identity)); + add.AddRange(dstItems); + + Dictionary srcLookup = new Dictionary(); + foreach (ODBEntity entity in common) + srcLookup.Add(entity.Identity, entity); + + foreach (ODBEntity entity in commonDst) + { + ODBValue identity = entity.Identity; + ODBEntity srcEntity = srcLookup[identity]; + + if (entity.CompareTo(srcEntity) != 0) + update.Add(identity, Diff.Construct(srcEntity, entity)); + } + } + + public override ODBEntity Apply(ODBEntity src) + { + ODBList list = src as ODBList; + + foreach (ODBEntity entity in list.ToArray()) + { + ODBValue identity = entity.Identity; + + if (remove.Contains(identity)) + { + list.Remove(entity); + } else if (update.ContainsKey(identity)) + { + update[identity].Apply(entity); + } + } + + foreach (ODBEntity entity in add) + { + list.Add(entity.Clone()); + } + + return src; + } + + public override string ToString() + { + return base.ToString(); + } + + public override string ToTreeString(int indent) + { + indent += 2; + StringBuilder stringBuilder = new StringBuilder(); + + stringBuilder.AppendFormat("ListDiff Remove={0} Change={1} Add={2}",remove.Count,update.Count,add.Count); + foreach (ODBEntity key in remove) + { + stringBuilder.AppendLine(); + stringBuilder.AppendFormat("{0}-{1}", new string(' ', indent), key); + } + foreach (ODBValue key in update.Keys) + { + stringBuilder.AppendLine(); + stringBuilder.AppendFormat("{0} {1} {2}", new string(' ', indent), key, update[key].ToTreeString(indent)); + } + foreach (ODBEntity key in add) + { + stringBuilder.AppendLine(); + stringBuilder.AppendFormat("{0}+{1}", new string(' ', indent), key.ToTreeString(indent)); + } + + return stringBuilder.ToString(); + + } + + } +} diff --git a/ng/index/Index.cs b/ng/index/Index.cs new file mode 100644 index 
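The three Diff flavors above compose recursively: Construct compares two ODB values and Apply replays the recorded changes onto the source, which should afterwards compare equal to the destination. A minimal round trip (keys and values are illustrative):

    using ln.objects.catalog;
    using ln.types.odb.ng;
    using ln.types.odb.ng.diff;

    Document a = new Document();
    Document b = new Document();
    a["name"] = new ODBStringValue("Ada");
    b["name"] = new ODBStringValue("Grace");

    Diff d = Diff.Construct(a, b);   // a DocumentDiff holding one SimpleDiff
    d.Apply(a);                      // mutates a in place; a["name"] is now "Grace"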
0000000..827d4a1 --- /dev/null +++ b/ng/index/Index.cs @@ -0,0 +1,34 @@ +using System; +using System.Collections.Generic; +using ln.objects.catalog; + +namespace ln.types.odb.ng.index +{ + public abstract class Index + { + public string IndexName { get; protected set; } + public Path IndexPath { get; protected set; } + + protected Index() + { + } + protected Index(Path indexPath) + :this() + { + IndexPath = indexPath; + IndexName = IndexPath.Complete; + } + + public virtual void Replace(Guid documentID, ODBEntity value) { + Remove(documentID); + Add(documentID, value); + } + public abstract void Add(Guid documentID, ODBEntity value); + public abstract void Remove(Guid documentID); + + public abstract IEnumerable GetDocumentIDs(Predicate predicate); + + public abstract bool LoadIndex(string basePath, long lastCloseTimestamp); + public abstract bool SaveIndex(string basePath, long lastCloseTimestamp); + } +} diff --git a/ng/index/IndexPath.cs b/ng/index/IndexPath.cs new file mode 100644 index 0000000..b6f00ca --- /dev/null +++ b/ng/index/IndexPath.cs @@ -0,0 +1,435 @@ +using System; +using System.Collections.Generic; +using System.Collections; +using System.Reflection; +using System.Linq; +using System.Runtime.InteropServices.ComTypes; +using ln.objects.catalog; + +namespace ln.types.odb.ng.index +{ + public abstract class IndexPath + { + public static IndexPath Build(Path path) + { + if (path.Element == null) + return new FinalPath(path); + + if (path.Element.Equals("[]")) + return new ListPath(); + + return new DocumentPath(); + } + + public abstract IEnumerable GetIndeces(); + + public abstract bool Ensure(Path path); + public abstract bool Indexed(Path path); + + public virtual void Replace(Guid documentID, ODBEntity value) + { + Remove(documentID); + Add(documentID, value); + } + public abstract void Add(Guid documentID, ODBEntity value); + public abstract void Remove(Guid documentID); + + public abstract IEnumerable GetDocumentIDs(Path path, Predicate predicate); + public virtual IEnumerable GetDocumentIDs(string path, Predicate predicate) + { + return GetDocumentIDs(SplitPath(path), predicate); + } + + + + public class DocumentPath : IndexPath + { + Dictionary children = new Dictionary(); + + public DocumentPath() + { + } + + public override IEnumerable GetDocumentIDs(Path path, Predicate predicate) + { + return children[path.Element].GetDocumentIDs(path.Next, predicate); + } + + public override IEnumerable GetIndeces() + { + return children.Values.SelectMany((arg) => arg.GetIndeces()); + } + + public override bool Indexed(Path path) + { + if (children.ContainsKey(path.Element)) + return children[path.Element].Indexed(path.Next); + return false; + } + + public override void Remove(Guid documentID) + { + foreach (IndexPath next in children.Values) + next.Remove(documentID); + } + + public override void Add(Guid documentID, ODBEntity value) + { + foreach (string childName in children.Keys) + { + children[childName].Add(documentID, (value as Document)[childName]); + } + } + public override void Replace(Guid documentID, ODBEntity value) + { + foreach (string childName in children.Keys) + { + children[childName].Replace(documentID, (value as Document)[childName]); + } + } + + public override bool Ensure(Path path) + { + bool added = false; + if (!children.ContainsKey(path.Element)) + { + children.Add(path.Element, IndexPath.Build(path.Next)); + added = true; + } + return children[path.Element].Ensure(path.Next) || added; + } + } + + class ListPath : IndexPath + { + IndexPath nextPath; + + 
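+        // Worked example (the path string is arbitrary): IndexPath.SplitPath("addresses[].city")
+        // tokenizes to "addresses", "[]", "city", and Ensure(...) builds the chain
+        //
+        //     DocumentPath["addresses"] -> ListPath -> DocumentPath["city"] -> FinalPath(SimpleIndex)
+        //
+        // Add(documentID, value) then descends the document, fans out over every
+        // element of the ODBList behind "addresses", and records each element's
+        // "city" value in the single SimpleIndex leaf.
+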
public ListPath() + { + } + + public override IEnumerable GetIndeces() => nextPath.GetIndeces(); + + public override bool Ensure(Path path) + { + bool added = false; + if (nextPath == null) + { + nextPath = IndexPath.Build(path.Next); + added = true; + } + return nextPath.Ensure(path.Next) || added; + } + + public override bool Indexed(Path path) + { + return nextPath.Indexed(path.Next); + } + + + public override IEnumerable GetDocumentIDs(Path path, Predicate predicate) + { + return nextPath.GetDocumentIDs(path.Next, predicate); + } + + public override void Remove(Guid documentID) + { + nextPath.Remove(documentID); + } + public override void Add(Guid documentID, ODBEntity value) + { + if (!ODBNull.Instance.Equals(value)) + foreach (ODBEntity v in ((ODBList)value)) + { + nextPath.Add(documentID, v); + } + } + } + + class FinalPath : IndexPath + { + Path indexPath; + Index index; + + public FinalPath(Path indexPath) + { + this.indexPath = indexPath; + } + + public override IEnumerable GetIndeces() => new Index[] { index }; + + public override bool Indexed(Path path) => true; + + public override IEnumerable GetDocumentIDs(Path path, Predicate predicate) + { + return index.GetDocumentIDs(predicate); + } + + public override void Remove(Guid documentID) + { + index.Remove(documentID); + } + + public override void Add(Guid documentID, ODBEntity value) + { + index.Add(documentID, value); + } + public override void Replace(Guid documentID, ODBEntity value) + { + index.Replace(documentID, value); + } + + public override bool Ensure(Path path) + { + if (path.Element != null) + throw new NotSupportedException(); + + if (index == null) + { + index = new SimpleIndex(indexPath); + return true; + } + + return false; + } + } + + + + public static Path SplitPath(string path) + { + List pathTokens = new List(); + + foreach (string primaryToken in path.Split('.')) + { + string suffix = ""; + string token = primaryToken; + int i = token.IndexOf('['); + if (i > 0) + { + suffix = token.Substring(i); + token = token.Substring(0, i); + } + + pathTokens.Add(token); + + string s = suffix; + while (s.Length > 0) + { + if (s.StartsWith("[]", StringComparison.InvariantCulture)) + { + pathTokens.Add("[]"); + s = s.Substring(2); + } + else + { + throw new NotSupportedException(); + } + } + } + + return new Path(pathTokens.ToArray()); + } + + public static string TranslatePropertyPath(Type elementType, string propPath) + { + String[] path = propPath.Split('.'); + Type currentType = elementType; + + for (int n = 0; n < path.Length; n++) + { + string suffix = ""; + string token = path[n]; + int i = token.IndexOf('['); + if (i > 0) + { + suffix = token.Substring(i); + token = token.Substring(0, i); + } + + FieldInfo fieldInfo = currentType.GetField(token, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (fieldInfo == null) + { + string backingFieldName = String.Format("<{0}>k__BackingField", token); + PropertyInfo propertyInfo = currentType.GetProperty(token, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + if (propertyInfo != null) + { + fieldInfo = currentType.GetField(backingFieldName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + if (fieldInfo == null) + throw new NotImplementedException("can't identify backing field for property"); + } + else + { + throw new ArgumentOutOfRangeException(nameof(path)); + } + + } + + token = fieldInfo.Name; + currentType = fieldInfo.FieldType; + + string s = suffix; + while (s.Length > 0) + { + if 
(s.StartsWith("[]", StringComparison.InvariantCulture)) + { + if (currentType.IsArray) + currentType = currentType.GetElementType(); + else if (currentType.IsGenericType) + currentType = currentType.GetGenericArguments()[0]; + else + throw new NotSupportedException(); + + s = s.Substring(2); + } + else + { + throw new NotSupportedException(); + } + } + + path[n] = String.Format("{0}{1}", token, suffix); + } + return String.Join(".", path); + } + + } + + + + //class wait { + // protected Index Index; + + // IndexPath NextIndexPath; + + // protected IndexPath() + // {} + + // protected IndexPath(Index index) + // { + // Index = index; + // } + // public IndexPath(Index index, Queue path) + // :this(index) + // { + // NextIndexPath = CreateNextIndexPath(path); + // } + + // private IndexPath CreateNextIndexPath(Queue path) + // { + // if (path.Count == 0) + // { + // return new IndexUpdatePath(Index); + // } + // else + // { + // return new PropertyPath(propertyIndex, path); + // } + // } + + // public virtual void UpdateIndex(ODBDocument document) + // { + // propertyIndex.TryRemove(document.ID); + // UpdateIndex(document, document); + // } + + // public virtual void UpdateIndex(ODBDocument document, ODBValue element) + // { + // NextIndexPath.UpdateIndex(document, element); + // } + + // public virtual IEnumerable Retrieve(ODBValue element) + // { + // return NextIndexPath.Retrieve(element); + // } + + + // class PropertyPath : IndexPath + // { + // string propertyName; + + // public PropertyPath(PropertyIndex propertyIndex,Queue path) + // :base(propertyIndex) + // { + // propertyName = path.Dequeue(); + + // NextIndexPath = CreateNextIndexPath(path); + + // while (propertyName.EndsWith("[]",StringComparison.InvariantCulture)) + // { + // NextIndexPath = new EnumPath(propertyIndex, NextIndexPath); + // propertyName = propertyName.Substring(0, propertyName.Length - 2); + // } + // } + + // public override void UpdateIndex(ODBDocument document, ODBValue element) + // { + // if (element is ODBDocument) + // { + // ODBDocument edoc = element as ODBDocument; + // NextIndexPath.UpdateIndex(document, edoc[propertyName]); + // } + // } + + // public override IEnumerable Retrieve(ODBValue element) + // { + // if (element is ODBDocument) + // { + // ODBDocument edoc = element as ODBDocument; + // return NextIndexPath.Retrieve(edoc[propertyName]); + // } + // return new ODBValue[0]; + // } + // } + + // class EnumPath : IndexPath + // { + // public EnumPath(PropertyIndex propertyIndex,IndexPath nextIndexPath) + // : base(propertyIndex) + // { + // NextIndexPath = nextIndexPath; + // } + + // public override void UpdateIndex(ODBDocument document, ODBValue element) + // { + // if (element is ODBList) + // { + // ODBList elist = element as ODBList; + // foreach (ODBValue le in elist) + // { + // NextIndexPath.UpdateIndex(document, le); + // } + // } + // } + // public override IEnumerable Retrieve(ODBValue element) + // { + // if (element is ODBList) + // { + // ODBList elist = element as ODBList; + // return elist.SelectMany(e => NextIndexPath.Retrieve(e)); + // } + // return new ODBValue[0]; + // } + // } + + // class IndexUpdatePath : IndexPath + // { + // public IndexUpdatePath(PropertyIndex propertyIndex) + // : base(propertyIndex) + // { + // } + + // public override void UpdateIndex(ODBDocument document, ODBValue element) + // { + // propertyIndex.Add(element, document.ID); + // } + + // public override IEnumerable Retrieve(ODBValue element) + // { + // return new ODBValue[] { element }; + // } + 
// }
+
+    //}
+}
diff --git a/ng/index/Path.cs b/ng/index/Path.cs
new file mode 100644
index 0000000..6c716a8
--- /dev/null
+++ b/ng/index/Path.cs
@@ -0,0 +1,66 @@
+using System;
+using System.Collections.Generic;
+using System.Collections;
+using System.Linq;
+using System.Text;
+namespace ln.types.odb.ng.index
+{
+    public class Path
+    {
+        public String Element { get; }
+        public Path Next { get; }
+        public Path Parent { get; }
+
+        private Path(Path parent, IEnumerator<string> path)
+        {
+            Parent = parent;
+
+            if (path.MoveNext())
+            {
+                Element = path.Current;
+                Next = new Path(this, path);
+            }
+            else
+            {
+                Element = null;
+                Next = null;
+            }
+        }
+        public Path(String[] path)
+            : this(null, ((IEnumerable<string>)path).GetEnumerator())
+        {
+        }
+
+        public string Complete
+        {
+            get
+            {
+                List<string> path = new List<string>();
+                Climb(path);
+
+                StringBuilder sb = new StringBuilder();
+
+                sb.Append(path[0]);
+                path.RemoveAt(0);
+
+                foreach (String pe in path)
+                {
+                    if (!pe.Equals("[]"))
+                        sb.Append('_');
+                    sb.Append(pe);
+                }
+
+                return sb.ToString();
+            }
+        }
+
+        private void Climb(List<string> path)
+        {
+            if (Parent != null)
+                Parent.Climb(path);
+            if (Element != null)
+                path.Add(Element);
+        }
+
+    }
+}
diff --git a/ng/index/SimpleIndex.cs b/ng/index/SimpleIndex.cs
new file mode 100644
index 0000000..d82bd20
--- /dev/null
+++ b/ng/index/SimpleIndex.cs
@@ -0,0 +1,108 @@
+using System;
+using System.Collections.Generic;
+using ln.types.btree;
+using System.Linq;
+using System.IO;
+using ln.logging;
+using ln.objects.catalog;
+
+namespace ln.types.odb.ng.index
+{
+    public class SimpleIndex : Index
+    {
+        BTreeValueList<ODBEntity, Guid> valueIndex = new BTreeValueList<ODBEntity, Guid>();
+        BTreeValueList<Guid, ODBEntity> reverseIndex = new BTreeValueList<Guid, ODBEntity>();
+
+        public SimpleIndex(Path path)
+            : base(path)
+        {
+        }
+
+        public override IEnumerable<Guid> GetDocumentIDs(Predicate<ODBEntity> predicate)
+        {
+            HashSet<Guid> matchedIDs = new HashSet<Guid>();
+            foreach (ODBEntity value in valueIndex.Keys)
+            {
+                if (predicate(value))
+                    foreach (Guid id in valueIndex[value])
+                        matchedIDs.Add(id);
+            }
+            return matchedIDs;
+        }
+
+        public override void Add(Guid documentID, ODBEntity value)
+        {
+            valueIndex.Add(value, documentID);
+            reverseIndex.Add(documentID, value);
+        }
+
+        public override void Remove(Guid documentID)
+        {
+            if (reverseIndex.ContainsKey(documentID))
+            {
+                foreach (ODBEntity value in reverseIndex[documentID].ToArray())
+                {
+                    valueIndex.Remove(value, documentID);
+                }
+                reverseIndex.Remove(documentID);
+            }
+        }
+
+        public override bool LoadIndex(string basePath, long lastCloseTimestamp)
+        {
+            if (File.Exists(System.IO.Path.Combine(basePath, String.Format("{0}.idx", IndexName))))
+                using (FileStream fileStream = new FileStream(System.IO.Path.Combine(basePath, String.Format("{0}.idx", IndexName)), FileMode.Open))
+                {
+                    byte[] indexBytes = fileStream.ReadBytes((int)fileStream.Length);
+                    Document indexDocument = new Document(indexBytes, 0, indexBytes.Length);
+
+                    long idxLastCloseTimestamp = indexDocument["LastCloseTimestamp"].As<long>();
+                    if (idxLastCloseTimestamp != lastCloseTimestamp)
+                    {
+                        Logging.Log(LogLevel.WARNING, "Index timestamp {0} is not matching ( {1} != {2} )", IndexName, idxLastCloseTimestamp, lastCloseTimestamp);
+                        return false;
+                    }
+                    else
+                    {
+                        foreach (ODBEntity key in indexDocument.Keys)
+                        {
+                            if (key is ODBGuid)
+                            {
+                                Guid documentID = key.As<Guid>();
+                                ODBList valueList = indexDocument[key] as ODBList;
+                                foreach (ODBEntity value in valueList)
+                                {
+                                    Add(documentID, value);
+                                }
+
+                            }
+                        }
+                        return true;
+                    }
+                }
+            return false;
+        }
+        public override bool SaveIndex(string basePath, long lastCloseTimestamp)
+        {
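+            // Persistence sketch (paths, IDs and timestamps are arbitrary): SaveIndex
+            // serializes the reverse index as one Document keyed by document ID and
+            // tagged with the storage's close timestamp; LoadIndex above only accepts
+            // a file whose timestamp matches, otherwise the index must be rebuilt.
+            //
+            //     var idx = new SimpleIndex(IndexPath.SplitPath("Name"));
+            //     idx.Add(docID, new ODBStringValue("Ada"));
+            //     idx.SaveIndex("/tmp/idx", ts);
+            //
+            //     var again = new SimpleIndex(IndexPath.SplitPath("Name"));
+            //     again.LoadIndex("/tmp/idx", ts);   // false on timestamp mismatch
+            //     again.GetDocumentIDs(v => v.CompareTo(new ODBStringValue("Ada")) == 0);  // yields docID
+
+            Document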
indexDocument = new Document(); + indexDocument["LastCloseTimestamp"] = new ODBLong(lastCloseTimestamp); + + foreach (Guid documentID in reverseIndex.Keys) + { + ODBList valueList = new ODBList(); + valueList.AddRange(reverseIndex[documentID]); + indexDocument[new ODBGuid(documentID)] = valueList; + } + + byte[] indexBytes = indexDocument.GetStorageBytes(); + + using (FileStream fileStream = new FileStream(System.IO.Path.Combine(basePath, String.Format("{0}.idx", IndexName)), FileMode.Create)) + { + fileStream.WriteBytes(indexBytes); + fileStream.Close(); + } + + return true; + } + } +} diff --git a/ng/mappings/ClassMapping.cs b/ng/mappings/ClassMapping.cs new file mode 100644 index 0000000..098efd8 --- /dev/null +++ b/ng/mappings/ClassMapping.cs @@ -0,0 +1,225 @@ +using System; +using System.Reflection; +using System.Collections.Generic; +using ln.logging; +using System.Linq; +using ln.types.collections; +using ln.objects.catalog; + +namespace ln.types.odb.ng.mappings +{ + + public class ClassMapping : IODBMapping + { + public delegate object GetID(object o); + public delegate object SetID(object o, object id); + + public Type MappedType { get; } + public bool IsReferenceType => !MappedType.IsValueType; + + List mappedFields = new List(); + + Func createObjectHook; + Func filterFieldsHook; + + private ClassMapping() { } + + public ClassMapping(Type type) : this(type, null, null) { } + + public ClassMapping(Type type, Func createObjectHook, Func filterFieldsHook) + { + Logging.Log(LogLevel.DEBUG, "Constructing ClassMapping for {0}", type); + MappedType = type; + + this.createObjectHook = createObjectHook; + this.filterFieldsHook = filterFieldsHook; + + AddFields(type); + } + + private void AddFields(Type type) + { + foreach (FieldInfo fieldinfo in type.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public)) + { + if ((filterFieldsHook == null) || filterFieldsHook(fieldinfo)) + mappedFields.Add(fieldinfo); + } + + if ((type != null) && !type.IsValueType && (!typeof(object).Equals(type.BaseType))) + { + AddFields(type.BaseType); + } + } + + public bool HasField(String name) + { + foreach (FieldInfo fieldInfo in mappedFields) + if (fieldInfo.Name.Equals(name)) + return true; + return false; + } + + private object GetObjectForDocument(Mapper mapper,Document document) + { + if (IsReferenceType) + { + if (mapper.IdentityCache.TryGetValue(document.ID, out object o, () => (createObjectHook != null) ? createObjectHook(mapper, document) : Activator.CreateInstance(MappedType, true) )) + return o; + } + + return (createObjectHook != null) ? 
createObjectHook(mapper,document) : Activator.CreateInstance(MappedType, true); + } + + public object UnmapValue(Mapper mapper,ODBEntity oval) + { + Document document = oval as Document; + object o = GetObjectForDocument(mapper,document); + + Apply(mapper, document, o); + + return o; + } + + public void Apply(Mapper mapper,Document document,object o) + { + foreach (FieldInfo fieldInfo in mappedFields) + { + object fv = mapper.UnmapValue(fieldInfo.FieldType, document[fieldInfo.Name]); + if (!object.ReferenceEquals(fv, null)) + { + Type st = fv.GetType(); + if (st != fieldInfo.FieldType) + { + fv = mapper.TryImplicitMapping(fv, fieldInfo.FieldType); + st = fv.GetType(); + } + if (st != fieldInfo.FieldType) + fv = Convert.ChangeType(fv, fieldInfo.FieldType); + } + if (!object.ReferenceEquals(null, fv)) + fieldInfo.SetValue(o, fv); + } + } + + public Document MapDocument(Mapper mapper,Guid documentID,object value) + { + Document document = new Document(documentID); + document["__asm__"] = new ODBStringValue(value.GetType().Assembly.GetName().Name); + document["__type__"] = new ODBStringValue(value.GetType().FullName); + + foreach (FieldInfo fieldInfo in mappedFields) + { + object fv = fieldInfo.GetValue(value); + ODBEntity ov = null; + + ov = mapper.MapValue(fv); + + document[fieldInfo.Name] = ov; + } + + if (IsReferenceType) + { + mapper.IdentityCache.Ensure(documentID, value); + } + + return document; + } + + public ODBEntity MapValue(Mapper mapper,object value) + { + if (Object.ReferenceEquals(value, null)) + return ODBNull.Instance; + + if (!mapper.IdentityCache.TryGetIdentity(value, out Guid documentID)) + documentID = Guid.NewGuid(); + + return MapDocument(mapper, documentID, value); + } + + public Type GetFieldType(Mapper mapper,string fieldName) + { + foreach (FieldInfo fieldInfo in mappedFields) + if (fieldInfo.Name.Equals(fieldName)) + return fieldInfo.FieldType; + + throw new KeyNotFoundException(); + } + + } + + public class ObjectMapping : IODBMapping + { + + public ODBEntity MapValue(Mapper mapper, object value) + { + return new Document(); + } + + public object UnmapValue(Mapper mapper, ODBEntity oval) + { + if (oval is Document) + { + Document document = oval as Document; + if (!document.Contains(new ODBStringValue("__type__"))) + return new object(); + + Type dType = Type.GetType(String.Format("{0}, {1}",document["__type__"].As(), document["__asm__"].As())); + return mapper.UnmapValue(dType, oval); + } else if (oval is ODBList) + { + ODBList list = oval as ODBList; + Array array = (Array)mapper.UnmapValue(typeof(object[]), list); + + if (array.Length > 0) + { + /* Magic: Try to find out about the arrays real element type */ + HashSet eTypes = null; + + foreach (object v in array) + { + HashSet myTypes = new HashSet(); + + Type eType = v?.GetType(); + while (eType != null) + { + myTypes.Add(eType); + eType = eType.BaseType; + } + + if (eTypes == null) + { + eTypes = myTypes; + } + else + { + eTypes.IntersectWith(myTypes); + } + } + + foreach (Type t in eTypes.ToArray()) + { + if (eTypes.Contains(t.BaseType)) + eTypes.Remove(t.BaseType); + } + + Type baseElementType = eTypes.First(); + if (!array.GetType().GetElementType().Equals(baseElementType)) + { + Array tarray = Array.CreateInstance(baseElementType, array.Length); + Array.Copy(array, tarray, array.Length); + array = tarray; + } + } + + return array; + } + else if (oval is ODBValue) + { + return (oval as ODBValue).Value; + } + + throw new NotImplementedException(); + } + } + +} diff --git a/ng/mappings/DictionaryMapping.cs 
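The element-type inference above (the "Magic" block) picks the most derived common base class of all array elements; a worked example with placeholder types:

    class Animal { }
    class Cat : Animal { }
    class Dog : Animal { }

    // An ODBList of one Cat and one Dog first unmaps to object[2]. The ancestor
    // chains are { Cat, Animal, object } and { Dog, Animal, object }; their
    // intersection is { Animal, object }. Dropping every type whose BaseType is
    // still in the set removes object, leaving Animal, so the values are copied
    // into an Animal[2] (Array.CreateInstance + Array.Copy) before returning.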
b/ng/mappings/DictionaryMapping.cs new file mode 100644 index 0000000..38ed60b --- /dev/null +++ b/ng/mappings/DictionaryMapping.cs @@ -0,0 +1,84 @@ +using System; +using System.Linq; +using System.Collections; +using System.Reflection; +using System.Collections.Generic; +using ln.objects.catalog; + +namespace ln.types.odb.ng.mappings +{ + public class DictionaryMapping : IODBMapping + { + public DictionaryMapping() + { + } + + public ODBEntity MapValue(Mapper mapper, object value) + { + Type dType = value.GetType(); + + if (dType.GetInterfaces().Contains(typeof(IDictionary))) + { + IDictionary dictionary = value as IDictionary; + Document document = new Document(); + + document["__asm__"] = new ODBStringValue(value.GetType().Assembly.GetName().Name); + document["__type__"] = new ODBStringValue(value.GetType().FullName); + + Document kTypes = new Document(); + Document vTypes = new Document(); + + document["__ktypes__"] = kTypes; + document["__vtypes__"] = vTypes; + + foreach (object key in dictionary.Keys) + { + object v = dictionary[key]; + ODBEntity okey = mapper.MapValue(key); + + document[okey] = mapper.MapValue(v); + kTypes[okey] = new ODBStringValue(Mapper.GetTypeName(key?.GetType())); + vTypes[okey] = new ODBStringValue(Mapper.GetTypeName(v?.GetType())); + } + return document; + } + throw new NotImplementedException(); + } + + public object UnmapValue(Mapper mapper, ODBEntity oval) + { + Document document = oval as Document; + Type dType = Type.GetType(String.Format("{0}, {1}",document["__type__"].As(),document["__asm__"].As())); //;Assembly.Load(document["__asm__"].AsString).GetType(document["__type__"].AsString); + + if (dType.IsGenericType) + { + IDictionary dictionary = (IDictionary)Activator.CreateInstance(dType, true); + + if (dType.GetGenericTypeDefinition().Equals(typeof(Dictionary<,>))) + { + Type kType = dType.GetGenericArguments()[0]; + Type vType = dType.GetGenericArguments()[1]; + + Document ktypes = document.Contains("__ktypes__") ? document["__ktypes__"] as Document : new Document(); + Document vtypes = document.Contains("__vtypes__") ? document["__vtypes__"] as Document : new Document(); + + foreach (ODBEntity key in document.Keys) + { + string skey = key.As(); + if (!skey.StartsWith("__", StringComparison.InvariantCulture) || !skey.EndsWith("__",StringComparison.InvariantCulture)) + { + Type kt = ktypes.Contains(key) ? Type.GetType(ktypes[key].As()) : kType; + Type vt = vtypes.Contains(key) ? Type.GetType(vtypes[key].As()) : vType; + + dictionary.Add(mapper.UnmapValue(kt, key), mapper.UnmapValue(vt, document[key])); + } + } + + return dictionary; + } + } + + throw new NotSupportedException(); + } + } +} diff --git a/ng/mappings/ListMapping.cs b/ng/mappings/ListMapping.cs new file mode 100644 index 0000000..5beedb0 --- /dev/null +++ b/ng/mappings/ListMapping.cs @@ -0,0 +1,146 @@ +// /** +// * File: ListMapping.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. 
+// * +// * +// **/ +using System; +using System.Collections; +using System.Linq; +using System.Collections.Generic; +using System.Reflection; +using ln.objects.catalog; + +namespace ln.types.odb.ng.mappings +{ + public class ListMapping : IODBMapping + { + public Type TargetType { get; } + + public ListMapping(Type targetType) + { + TargetType = targetType; + } + + + private Array UnmapArray(Mapper mapper, ODBList list) + { + Array array = Array.CreateInstance(TargetType.GetElementType(), list.Count); + for (int n = 0; n < list.Count; n++) + array.SetValue(mapper.UnmapValue(TargetType.GetElementType(), list[n]), n); + return array; + } + private ODBList MapArray(Mapper mapper, Array array) + { + ODBList list = new ODBList(); + for (int n = 0; n < array.Length; n++) + { + list.Add(mapper.MapValue(array.GetValue(n))); + } + return list; + } + + public object UnmapList(Mapper mapper, ODBList list) + { + IList ilist = (IList)Activator.CreateInstance(TargetType, true); + for (int n = 0; n < list.Count; n++) + { + ilist.Add(mapper.UnmapValue(TargetType.GetGenericArguments()[0], list[n])); + } + return ilist; + } + public ODBList MapList(Mapper mapper, object value) + { + ODBList list = new ODBList(); + IList ilist = (IList)value; + for (int n = 0; n < ilist.Count; n++) + list.Add(mapper.MapValue(ilist[n])); + return list; + } + + public object UnmapSet(Mapper mapper, ODBList list) + { + Type entype = typeof(UnmappingEnumeration<>).MakeGenericType(TargetType.GetGenericArguments()); + object en = Activator.CreateInstance(entype, mapper, list); + + return Activator.CreateInstance(TargetType, en ); + } + public ODBList MapSet(Mapper mapper, object value) + { + ODBList list = new ODBList(); + IEnumerable ienum = (IEnumerable)value; + foreach (object item in ienum) + list.Add(mapper.MapValue(item)); + return list; + } + + public object UnmapValue(Mapper mapper, ODBEntity oval) + { + if (TargetType.IsArray) + return UnmapArray(mapper, (ODBList)oval); + + if (TargetType.IsGenericType) + { + if (typeof(List<>).Equals(TargetType.GetGenericTypeDefinition())) + { + return UnmapList(mapper, (ODBList)oval); + } + else if (typeof(HashSet<>).Equals(TargetType.GetGenericTypeDefinition())) + { + return UnmapSet(mapper, (ODBList)oval); + } + } + + throw new NotImplementedException(); + } + + public ODBEntity MapValue(Mapper mapper, object value) + { + if (TargetType.IsArray) + return MapArray(mapper, (Array)value); + + if (TargetType.IsGenericType) + { + if (typeof(List<>).Equals(TargetType.GetGenericTypeDefinition())) + { + return MapList(mapper, value); + } + else if (typeof(HashSet<>).Equals(TargetType.GetGenericTypeDefinition())) + { + return MapSet(mapper, value); + } + } + throw new NotImplementedException(); + } + + class UnmappingEnumeration : IEnumerable + { + Type TargetType; + Mapper mapper; + ODBList list; + + public UnmappingEnumeration(Mapper mapper,ODBList list) + { + TargetType = typeof(T); + this.mapper = mapper; + this.list = list; + } + + public IEnumerator GetEnumerator() + { + foreach (ODBEntity item in list) + yield return (T)mapper.UnmapValue(TargetType, item); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } + + + } +} diff --git a/ng/mappings/ReferenceMapping.cs b/ng/mappings/ReferenceMapping.cs new file mode 100644 index 0000000..f26aea0 --- /dev/null +++ b/ng/mappings/ReferenceMapping.cs @@ -0,0 +1,45 @@ +using System; +using ln.objects.catalog; + +namespace ln.types.odb.ng.mappings +{ + public class ReferenceMapping : IODBMapping where T:class + { + 
public Type ReferencedType { get; private set; } + public String ReferencedCollectionName { get; set; } + + public ReferenceMapping() + { + ReferencedType = typeof(T); + ReferencedCollectionName = ReferencedType.FullName; + } + + public ODBValue MapValue(ODBMapper mapper, object value) + { + if (value == null) + { + return ODBNull.Instance; + } + else + { + ClassMapping classMapping = mapper.GetMapping(ReferencedType) as ClassMapping; + if (classMapping == null) + throw new NullReferenceException(String.Format("classMapping not found for Type {0}",ReferencedType)); + + object referenceID = classMapping.getID(value); + return mapper.MapValue(referenceID); + } + } + + public object UnmapValue(ODBMapper mapper, ODBValue oval) + { + ClassMapping classMapping = mapper.GetMapping(ReferencedType) as ClassMapping; + if (classMapping == null) + throw new NullReferenceException(String.Format("classMapping not found for Type {0}", ReferencedType)); + + object referenceID = mapper.UnmapValue(classMapping.IDType, oval); + T referenced = mapper.GetCollection(ReferencedCollectionName).Select(referenceID); + return referenced; + } + } +} diff --git a/ng/mappings/SimpleMapping.cs b/ng/mappings/SimpleMapping.cs new file mode 100644 index 0000000..3fca7f1 --- /dev/null +++ b/ng/mappings/SimpleMapping.cs @@ -0,0 +1,26 @@ +using System; +using ln.objects.catalog; +namespace ln.types.odb.ng.mappings +{ + public class SimpleMapping : IODBMapping + { + ODBMap map; + ODBUnmap unmap; + + public SimpleMapping(ODBMap map, ODBUnmap unmap) + { + this.map = map; + this.unmap = unmap; + } + + public ODBEntity MapValue(Mapper mapper, object value) + { + return map(mapper, value); + } + + public object UnmapValue(Mapper mapper, ODBEntity oval) + { + return unmap(mapper, oval); + } + } +} diff --git a/ng/storage/IStorage.cs b/ng/storage/IStorage.cs new file mode 100644 index 0000000..66108a0 --- /dev/null +++ b/ng/storage/IStorage.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using ln.objects.catalog; + +namespace ln.types.odb.ng.storage +{ + public interface IStorage : IDisposable + { + bool Open(); + void Close(); + bool IsOpen { get; } + + /// + /// Load the specified documentID. + /// + /// The loaded Document. + /// Document identifier. + /// + /// Non caching storage will return the latest version of the document with each call. + /// Caching storage will return the same Document instance with each call, as long as the document stayed in the cache. + /// No refreshing is done if a cached instance is returned! + /// + Document Load(Guid documentID); + + /// + /// Refresh the specified document. + /// + /// if the document was refreshed successfully + /// Document to be refreshed. Must have been loaded by a call to Load(..) of the same IStorage instance + /// will reload the document from storage and apply the loaded state to this instance. + /// If references an instance that has not been returned by a call to Load(..) (e.g. by using Clone()), internal caches may not be affected by the refresh. + /// + bool Refresh(Document document); + + /// + /// Save the specified document. + /// + /// Document to store + /// Non caching storage will store the Document with its full state. + /// Caching storage, e.g. SessionStorage, may only store changes to storage. Please refer to class specific documentation. + /// + void Save(Document document); + + /// + /// Delete the specified documentID from storage. + /// + /// Document identifier + /// Will remove the Document identified by documentID from storage. 
+ /// + void Delete(Guid documentID); + + IDisposable Lock(); + + bool Contains(Guid documentID); + IEnumerable GetDocumentIDs(); + IEnumerable GetDocumentIDs(string path,Predicate predicate); + DateTime GetStorageTimestamp(Guid documentID); + + void EnsureIndex(params string[] path); + + bool IsCaching { get; } + + } +} diff --git a/ng/storage/IStorageContainer.cs b/ng/storage/IStorageContainer.cs new file mode 100644 index 0000000..d0aac0e --- /dev/null +++ b/ng/storage/IStorageContainer.cs @@ -0,0 +1,23 @@ +// /** +// * File: IStorageContainer.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. +// * +// * +// **/ +using System; +using System.Collections.Generic; +namespace ln.types.odb.ng.storage +{ + public interface IStorageContainer : IDisposable + { + IStorageContainer Open(); + void Close(); + bool IsOpen { get; } + + IStorage GetStorage(string storageName); + IEnumerable GetStorageNames(); + } +} diff --git a/ng/storage/OrganizedFile.cs b/ng/storage/OrganizedFile.cs new file mode 100644 index 0000000..3e52a20 --- /dev/null +++ b/ng/storage/OrganizedFile.cs @@ -0,0 +1,227 @@ +using System; +using System.IO; +using ln.types.btree; +using System.Collections.Generic; +using System.Linq; +using ln.objects.catalog; + +namespace ln.types.odb.ng.storage +{ + public class OrganizedFile : IDisposable + { + public String FileName { get; private set; } + public OrganizedFileHeader FileHeader { get; private set; } = new OrganizedFileHeader(); + + public int CurrentStoreLength { get; private set; } + + FileStream fileStream; + FileStream lckFileStream; + + MappingBTree freeAreas = new MappingBTree((a)=>a.Offset); + MappingBTree usedAreas = new MappingBTree((a) => a.Offset); + + public OrganizedFile(string filename) + { + FileName = filename; + lckFileStream = new FileStream(string.Format("{0}.lck", FileName), FileMode.CreateNew); + fileStream = new FileStream(filename, FileMode.OpenOrCreate, FileAccess.ReadWrite); + + if (fileStream.Length > 0) + { + FileHeader.Read(fileStream); + + int nextOffset = FileHeader.FirstOffset; + + while (nextOffset < fileStream.Length) + { + OrganizedFileArea fileArea = new OrganizedFileArea(fileStream, nextOffset); + + if (fileArea.ReadTypeCode(fileStream) == 0) + freeAreas.Add(fileArea); + else + usedAreas.Add(fileArea); + + nextOffset = fileArea.NextOffset; + } + CurrentStoreLength = nextOffset; + } + } + + public void Close() + { + if (fileStream != null) + { + lock (fileStream) + { + FileHeader.Write(fileStream); + + fileStream.Close(); + fileStream.Dispose(); + lckFileStream.Close(); + lckFileStream.Dispose(); + File.Delete(string.Format("{0}.lck", FileName)); + + freeAreas.Clear(); + usedAreas.Clear(); + } + fileStream = null; + lckFileStream = null; + } + } + + public IEnumerable UsedAreas => usedAreas; + public void Clear(OrganizedFileArea fileArea) + { + lock (fileStream) + { + usedAreas.TryRemove(fileArea); + fileArea.Clear(fileStream); + freeAreas.Add(fileArea); + } + } + public OrganizedFileArea Store(ODBEntity value) + { + byte[] storageBytes = value.GetStorageBytes(); + return Store(storageBytes); + } + public OrganizedFileArea Store(byte[] bytes) + { + lock (fileStream) + { + OrganizedFileArea fileArea = freeAreas.Where((a) => a.MaxBytes >= bytes.Length).FirstOrDefault(); + if (fileArea == null) + { + fileArea = new OrganizedFileArea(CurrentStoreLength, bytes.Length + 4); + CurrentStoreLength = 
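+                    // Allocation note: Store() is first-fit; the first free area whose
+                    // MaxBytes covers the payload is reused, and only when none fits is
+                    // the file grown here. Worked example (values assumed): with
+                    // CurrentStoreLength = 4096 and bytes.Length = 100 the new area gets
+                    // Offset = 4096 and Size = 104 (payload plus the 4-byte size header),
+                    // so the next append starts at NextOffset = 4200.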
fileArea.NextOffset; + } + else + { + freeAreas.Remove(fileArea); + } + fileArea.Store(fileStream, bytes); + usedAreas.Add(fileArea); + + return fileArea; + } + } + + public void Dispose() => Close(); + + public class OrganizedFileHeader + { + public readonly UInt32 CurrentFileMagic = 0x00EEFFC0; + + public UInt32 Magic; + public OrganizedFileType FileType; + public Int32 FirstOffset; + public UInt32 Reserve0; + public UInt64 OpenCounter; + + public OrganizedFileHeader() + { + Magic = CurrentFileMagic; + FileType = OrganizedFileType.DATA; + FirstOffset = 1024; + Reserve0 = 0; + OpenCounter = 0; + } + + public void Read(FileStream stream) + { + stream.Position = 0; + + Magic = stream.ReadUInteger(); + if (Magic != CurrentFileMagic) + throw new FormatException("File Magic does not match! Possibly corrupted file!"); + FileType = (OrganizedFileType)stream.ReadInteger(); + FirstOffset = stream.ReadInteger(); + Reserve0 = stream.ReadUInteger(); + OpenCounter = stream.ReadULong(); + OpenCounter++; + } + + public void Write(FileStream stream) + { + stream.Position = 0; + + stream.WriteUInteger(Magic); + stream.WriteInteger((int)FileType); + stream.WriteInteger(FirstOffset); + stream.WriteUInteger(Reserve0); + stream.WriteULong(OpenCounter); + } + + } + + public class OrganizedFileArea : IComparable,IComparable + { + public Int32 Offset; + public Int32 Size; + public Int32 MaxBytes => Size - 4; + public Int32 NextOffset => Offset + Size; + + public OrganizedFileArea(int offset,int size) + { + Offset = offset; + Size = size; + } + public OrganizedFileArea(FileStream fileStream,int offset) + { + Offset = offset; + + fileStream.Position = Offset; + Size = fileStream.ReadInteger(); + } + + public int ReadTypeCode(FileStream fileStream) + { + fileStream.Position = Offset + 4; + return fileStream.ReadInteger(); + } + + public void Store(FileStream fileStream,byte[] buffer) + { + if (MaxBytes < buffer.Length) + throw new ArgumentException("Area too small for buffer to fit", nameof(buffer)); + + fileStream.Position = Offset; + fileStream.WriteInteger(Size); + fileStream.Write(buffer, 0, buffer.Length); + + if (buffer.Length < MaxBytes) + { + byte[] padding = new byte[MaxBytes - buffer.Length]; + fileStream.Write(padding, 0, padding.Length); + } + } + public byte[] Load(FileStream fileStream) + { + byte[] buffer = new byte[MaxBytes]; + fileStream.Position = Offset + 4; + fileStream.Read(buffer, 0, buffer.Length); + return buffer; + } + public void Clear(FileStream fileStream) + { + byte[] zero = new byte[MaxBytes]; + fileStream.WriteInteger(Size); + fileStream.Write(zero,0,zero.Length); + } + + + public int CompareTo(OrganizedFileArea other) + { + return Offset - other.Offset; + } + + public int CompareTo(object obj) + { + if (obj is OrganizedFileArea) + { + return Offset - (obj as OrganizedFileArea).Offset; + } + throw new NotSupportedException(); + } + } + } +} diff --git a/ng/storage/OrganizedFileType.cs b/ng/storage/OrganizedFileType.cs new file mode 100644 index 0000000..fe4daba --- /dev/null +++ b/ng/storage/OrganizedFileType.cs @@ -0,0 +1,9 @@ +using System; +namespace ln.types.odb.ng.storage +{ + public enum OrganizedFileType : Int32 + { + DATA, + INDEX + } +} diff --git a/ng/storage/SegmentedFile.cs b/ng/storage/SegmentedFile.cs new file mode 100644 index 0000000..8cb2ac1 --- /dev/null +++ b/ng/storage/SegmentedFile.cs @@ -0,0 +1,308 @@ +// /** +// * File: SegmentedFile.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. 
+// * Any use without proper permission is illegal and may lead to legal actions.
+// *
+// *
+// **/
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using ln.logging;
+using ln.types.btree;
+
+namespace ln.types.odb.ng.storage
+{
+    /**
+     * SegmentedFile
+     * ----------
+     * 0000  4  MAGIC Bytes
+     * 0004  4  Version
+     * 0008  8  LastCloseTimestamp
+     * 0010  4  FirstOffset
+     * 0014  4  GranularWidth
+     * 0018  8  Reserved 0
+     *
+     **/
+    public class SegmentedFile
+    {
+        public static byte[] MagicBytes { get; } = new byte[] { 0x0F, 0x0E, 0x0D, 0x0A };
+
+        public String FileName { get; }
+
+        public int FileVersion { get; private set; }
+        public long LastCloseTimestamp { get; private set; }
+        public int FirstOffset { get; private set; }
+
+        public int GranularWidth { get; private set; } = 12;
+        public int GranularityMask => (1 << GranularWidth) - 1;
+
+        public int AppendOffset { get; private set; }
+
+        public IEnumerable<Segment> Segments => segments;
+
+        MappingBTree<int, Segment> segments = new MappingBTree<int, Segment>((s) => s.Offset);
+
+        FileStream fileStream;
+
+        public SegmentedFile(string fileName)
+        {
+            FileName = fileName;
+        }
+        public SegmentedFile(string fileName, int granularWidth)
+            : this(fileName)
+        {
+            GranularWidth = granularWidth;
+        }
+
+        private void AssertOpen()
+        {
+            if (fileStream == null)
+                throw new IOException("FSStorage not opened");
+        }
+
+        // Round i up to the next multiple of the file's granularity.
+        private void CheckGranularity(ref int i) { i = (i + GranularityMask) & ~GranularityMask; }
+
+        public bool IsOpen => (fileStream != null);
+        public bool Open()
+        {
+            if (!IsOpen)
+            {
+                try
+                {
+                    fileStream = new FileStream(FileName, FileMode.OpenOrCreate);
+
+                    if (fileStream.Length == 0)
+                    {
+                        FileVersion = 0;
+                        LastCloseTimestamp = 0;
+                        FirstOffset = (1 << GranularWidth);
+                        if (FirstOffset < 0x20)
+                            throw new NotSupportedException("Granularity too small");
+
+                        AppendOffset = FirstOffset;
+
+                        Close();
+                        return Open();
+                    }
+                    else
+                    {
+                        if (!fileStream.ReadBytes(4).SequenceEqual(MagicBytes))
+                            throw new IOException("Magic bytes do not match");
+
+                        FileVersion = fileStream.ReadInteger();
+                        LastCloseTimestamp = fileStream.ReadLong();
+                        FirstOffset = fileStream.ReadInteger();
+                        GranularWidth = fileStream.ReadInteger();
+
+                        Scan();
+
+                        fileStream.Position = 8;
+                        fileStream.WriteLong(0);
+
+                        fileStream.Flush();
+                    }
+                }
+                catch (Exception e)
+                {
+                    Logging.Log(e);
+                    if (fileStream != null)
+                    {
+                        fileStream.Close();
+                        fileStream.Dispose();
+                        fileStream = null;
+                    }
+                    return false;
+                }
+
+                return true;
+            }
+            return false;
+        }
+
+        public Segment Append(Guid id, byte[] payload) => Append(id, payload.Length, payload);
+        // Reserve dataSize bytes, writing an empty payload (zero padding).
+        public Segment Append(Guid id, int dataSize) => Append(id, dataSize, new byte[0]);
+        public Segment Append(Guid id, int dataSize, byte[] payload)
+        {
+            dataSize += Segment.HeaderSize;
+            CheckGranularity(ref dataSize);
+            Segment segment = new Segment(AppendOffset, dataSize) { ID = id, };
+
+            Write(segment, payload);
+
+            segments.Add(segment);
+
+            AppendOffset = segment.NextOffset;
+
+            return segment;
+        }
+
+        public Segment Join(Segment a, Segment b)
+        {
+            if (a.NextOffset != b.Offset)
+                throw new ArgumentException("Segments to join are not siblings");
+
+            a.Size += b.Size;
+            WriteSegmentHead(a);
+            segments.Remove(b);
+
+            return a;
+        }
+
+        public Segment Split(Segment segment, int dataSize)
+        {
+            int requestedSize = dataSize + Segment.HeaderSize;
+            CheckGranularity(ref requestedSize);
+
+            if (requestedSize < segment.Size)
+            {
+                Segment split = new Segment(segment.Offset + requestedSize, segment.Size - requestedSize);
+                segment.Size = requestedSize;
+
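+                // Worked example (sizes assumed): with the default GranularWidth of 12
+                // (4 KiB granularity), Split(segment, 1000) on a 16384-byte segment
+                // computes requestedSize = 1000 + 32 rounded up to 4096, so `segment`
+                // keeps the first 4096 bytes and `split` covers the remaining 12288.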
segments.Add(split); + + WriteSegmentHead(split); + WriteSegmentHead(segment); + + return split; + } + return null; + } + + public byte[] Read(Segment segment) + { + fileStream.Position = segment.PayloadOffset; + return fileStream.ReadBytes(segment.PayloadSize); + } + + private void WriteSegmentHead(Segment segment) + { + fileStream.Position = segment.Offset; + fileStream.WriteInteger(segment.Size); + fileStream.WriteBytes(segment.ID.ToByteArray()); + fileStream.WriteDouble(segment.TimeStamp.ToUnixTimeMilliseconds()); + } + + public void Write(Segment segment,byte[] bytes) + { + AssertOpen(); + + if (bytes.Length > (segment.PayloadSize)) + throw new ArgumentOutOfRangeException(nameof(bytes)); + + segment.TimeStamp = DateTime.Now; + + WriteSegmentHead(segment); + + fileStream.Position = segment.PayloadOffset; + fileStream.WriteBytes(bytes); + fileStream.WriteBytes(new byte[segment.PayloadSize - bytes.Length]); + } + + /** + * Position fileStream to offset, read Segment Header and construct a Segment instance to return + **/ + private Segment ScanSegment(int offset) + { + fileStream.Position = offset; + + int size = fileStream.ReadInteger(); + byte[] id = fileStream.ReadBytes(16); + double timestamp = fileStream.ReadDouble(); + + return new Segment(offset, size, DateTimeExtensions.FromUnixTimeMilliseconds(timestamp)) { ID = new Guid(id), }; + } + + /** + * Start at First Segment Offset and scan for all Segments in file + **/ + private void Scan() + { + int offset = FirstOffset; + Segment segment = null; + + while (offset < fileStream.Length) + { + segment = ScanSegment(offset); + segments.Add(segment); + offset = segment.NextOffset; + } + AppendOffset = offset; + } + + public void Close() + { + lock (this){ + AssertOpen(); + + fileStream.Position = 0; + fileStream.WriteBytes(MagicBytes); + fileStream.WriteInteger(FileVersion); + LastCloseTimestamp = (long)DateTime.Now.ToUnixTimeMilliseconds(); + fileStream.WriteLong(LastCloseTimestamp); + fileStream.WriteInteger(FirstOffset); + fileStream.WriteInteger(GranularWidth); + + fileStream.Close(); + fileStream.Dispose(); + fileStream = null; + } + } + + public void Sync() + { + lock (this) + { + fileStream.Flush(); + } + } + + public class Segment + { + public static readonly int HeaderSize = 32; + + public int Offset { get; } + public int Size { get; set; } + + public int PayloadOffset => Offset + HeaderSize; + public int PayloadSize => Size - HeaderSize; + + public Guid ID { get; set; } + public DateTime TimeStamp { get; set; } + + public int NextOffset => Offset + Size; + + public Segment(int offset, int size) + { + Offset = offset; + Size = size; + } + public Segment(int offset, int size,DateTime timestamp) + :this(offset,size) + { + TimeStamp = timestamp; + } + + public Segment Split(int splitSize) + { + if (splitSize >= Size) + throw new ArgumentOutOfRangeException(nameof(splitSize)); + + Segment splitArea = new Segment(Offset + Size - splitSize, splitSize); + Size -= splitSize; + + return splitArea; + } + + public override string ToString() + { + return string.Format("[StorageArea Offset=0x{0:x8} Size=0x{1:x8}]", Offset, Size); + } + } + + } +} diff --git a/ng/storage/StorageArea.cs b/ng/storage/StorageArea.cs new file mode 100644 index 0000000..3965216 --- /dev/null +++ b/ng/storage/StorageArea.cs @@ -0,0 +1,45 @@ +// /** +// * File: StorageArea.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. 
+// * Any use wihtout proper permission is illegal and may lead to legal actions. +// * +// * +// **/ +using System; +namespace ln.types.odb.ng.storage +{ + public class StorageArea + { + public int Offset { get; } + public int Size { get; set; } + + public Guid ID { get; set; } + public DateTime TimeStamp { get; set; } + + public int NextOffset => Offset + Size; + + public StorageArea(int offset,int size) + { + Offset = offset; + Size = size; + } + + public StorageArea Split(int splitSize) + { + if (splitSize >= Size) + throw new ArgumentOutOfRangeException(nameof(splitSize)); + + StorageArea splitArea = new StorageArea(Offset + Size - splitSize,splitSize); + Size -= splitSize; + + return splitArea; + } + + public override string ToString() + { + return string.Format("[StorageArea Offset=0x{0:x8} Size=0x{1:x8}]",Offset,Size); + } + } +} diff --git a/ng/storage/StorageAreaContainer.cs b/ng/storage/StorageAreaContainer.cs new file mode 100644 index 0000000..808f33b --- /dev/null +++ b/ng/storage/StorageAreaContainer.cs @@ -0,0 +1,79 @@ +// /** +// * File: StorageAreaContainer.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. +// * +// * +// **/ +using System; +using ln.types.btree; +using ln.logging; +namespace ln.types.odb.ng.storage +{ + public class StorageAreaContainer + { + public bool DEBUG = false; + + public int SplitLimit { get; set; } = 32; + + MappingBTree storageAreas = new MappingBTree((value)=>value.Offset); + + public StorageAreaContainer() + { + } + + public StorageArea Push(StorageArea storageArea) + { + if (DEBUG) + Logging.Log(LogLevel.DEBUG, "StorageAreaContainer: Push: {0}", storageArea); + + storageAreas.Add(storageArea); + + try + { + StorageArea previousStorageArea = storageAreas.Previous(storageArea); + if ((previousStorageArea != null) && (previousStorageArea.NextOffset == storageArea.Offset)) + { + previousStorageArea.Size += storageArea.Size; + storageAreas.Remove(storageArea); + storageArea = previousStorageArea; + } + } catch + { + } + + try + { + StorageArea nextStorageArea = storageAreas.Next(storageArea); + if ((nextStorageArea != null) && (storageArea.NextOffset == nextStorageArea.Offset)) + { + storageArea.Size += nextStorageArea.Size; + storageAreas.Remove(nextStorageArea); + } + } catch + { } + + + return storageArea; + } + + public StorageArea Pop(int minSize) + { + foreach (StorageArea storageArea in storageAreas) + { + if (storageArea.Size >= minSize) + { + if (DEBUG) + Logging.Log(LogLevel.DEBUG, "StorageAreaContainer: Pop: {0}", storageArea); + + storageAreas.RemoveKey(storageArea.Offset); + return storageArea; + } + } + return null; + } + + } +} diff --git a/ng/storage/bases/ChainedStorage.cs b/ng/storage/bases/ChainedStorage.cs new file mode 100644 index 0000000..0c001da --- /dev/null +++ b/ng/storage/bases/ChainedStorage.cs @@ -0,0 +1,43 @@ +using System; +using System.Collections.Generic; +using ln.objects.catalog; +using ln.types.threads; + +namespace ln.types.odb.ng.storage.bases +{ + public abstract class ChainedStorage : IStorage + { + public IStorage Storage { get; private set; } + + public bool IsOpen => Storage.IsOpen; + + public ChainedStorage(IStorage storage) + { + Storage = storage; + } + + public virtual bool Open() => Storage.Open(); + public virtual void Close() => Storage.Close(); + public virtual Document Load(Guid documentID) => Storage.Load(documentID); + public virtual void 
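+        // ChainedStorage is a decorator base: every IStorage member delegates to the
+        // wrapped Storage by default, so a subclass overrides only what it changes.
+        // A hypothetical subclass, purely for illustration (not part of this commit):
+        //
+        //   class LoggingStorage : ChainedStorage
+        //   {
+        //       public LoggingStorage(IStorage storage) : base(storage) { }
+        //       public override void Save(Document document)
+        //       {
+        //           Logging.Log(LogLevel.DEBUG, "Save {0}", document.ID);
+        //           base.Save(document);
+        //       }
+        //       public override bool IsCaching => Storage.IsCaching;
+        //   }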
Save(Document document) => Storage.Save(document); + public virtual void Delete(Guid documentID) => Storage.Delete(documentID); + + public virtual bool Contains(Guid documentID) => Storage.Contains(documentID); + public virtual IEnumerable GetDocumentIDs() => Storage.GetDocumentIDs(); + public virtual IEnumerable GetDocumentIDs(string path, Predicate predicate) => Storage.GetDocumentIDs(path, predicate); + + public virtual DateTime GetStorageTimestamp(Guid documentID) => Storage.GetStorageTimestamp(documentID); + public virtual void EnsureIndex(params string[] path) => Storage.EnsureIndex(path); + public virtual bool Refresh(Document document) => Storage.Refresh(document); + + public virtual IDisposable Lock() => new DisposableLock(this); + + public virtual void Dispose() + { + Storage.Dispose(); + Storage = null; + } + + public abstract bool IsCaching { get; } + } +} diff --git a/ng/storage/bases/StorageBase.cs b/ng/storage/bases/StorageBase.cs new file mode 100644 index 0000000..ec34199 --- /dev/null +++ b/ng/storage/bases/StorageBase.cs @@ -0,0 +1,60 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using ln.types.btree; +using System.Threading; +using System.Runtime.ConstrainedExecution; +using ln.types.threads; +using ln.objects.catalog; + +namespace ln.types.odb.ng.storage.bases +{ + public abstract class StorageBase : IStorage + { + BTree lockedDocuments = new BTree(); + + public StorageBase() + { + } + public abstract bool Open(); + public abstract void Close(); + public abstract bool IsOpen { get; } + + + public abstract Document Load(Guid documentID); + public abstract void Save(Document document); + public abstract void Delete(Guid documentID); + + public virtual DateTime GetStorageTimestamp(Guid documentID) + { + Document doc = Load(documentID); + return doc.StorageTimeStamp; + } + + /* Enumeration */ + public abstract IEnumerable GetDocumentIDs(); + public abstract IEnumerable GetDocumentIDs(string path, Predicate predicate); + + /* Indeces */ + public abstract void EnsureIndex(params string[] path); + + public virtual void Dispose() + { + if (IsOpen) + Close(); + } + + public virtual bool Contains(Guid documentID) + { + return GetDocumentIDs().Contains(documentID); + } + + public virtual IDisposable Lock() + { + return new DisposableLock(this); + } + + public abstract bool Refresh(Document document); + public abstract bool IsCaching { get; } + } +} diff --git a/ng/storage/cache/CachingStorage.cs b/ng/storage/cache/CachingStorage.cs new file mode 100644 index 0000000..068aa95 --- /dev/null +++ b/ng/storage/cache/CachingStorage.cs @@ -0,0 +1,97 @@ +using System; +using System.Collections.Generic; +using ln.types.odb.ng.storage.bases; +using ln.types.btree; +using ln.types.cache; +using NUnit.Framework; +using ln.types.threads; +using ln.objects.catalog; + +namespace ln.types.odb.ng.storage.cache +{ + public class CachingStorage : StorageBase + { + public int CacheSize => cache.Count; + public int MaxCacheSize + { + get => cache.MaxCacheSize; + set => cache.MaxCacheSize = value; + } + + IStorage storage; + + Cache cache = new Cache(); + + public CachingStorage(IStorage storage) + { + this.storage = storage; + } + + public override bool IsOpen => storage.IsOpen; + + public override void Close() + { + if (IsOpen) + { + cache.Clear(); + storage.Close(); + } + } + + public override void Delete(Guid documentID) + { + lock (this) + { + storage.Delete(documentID); + cache.Forget(documentID); + } + } + + public override void EnsureIndex(params string[] path) => 
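+        // Read-through caching: Load() first consults the in-memory cache, falls back
+        // to the wrapped storage on a miss, and then pins the result via cache.Ensure(),
+        // so repeated loads of a hot document return the same instance. A configuration
+        // sketch (size value assumed):
+        //
+        //   var caching = new CachingStorage(innerStorage) { MaxCacheSize = 1024 };
+        //   Document a = caching.Load(id);
+        //   Document b = caching.Load(id);   // same instance while the entry stays cached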
storage.EnsureIndex(path); + public override IEnumerable GetDocumentIDs() => storage.GetDocumentIDs(); + public override IEnumerable GetDocumentIDs(string path, Predicate predicate) => storage.GetDocumentIDs(path, predicate); + + public override Document Load(Guid documentID) + { + if (!Contains(documentID)) + throw new KeyNotFoundException(); + + Document document = null; + if (!cache.TryGet(documentID,out document)) + document = storage.Load(documentID); + + cache.Ensure(documentID, document); + + return document; + } + + public override bool Open() + { + return storage.Open(); + } + + public override void Save(Document document) + { + lock (this) + { + storage.Save(document); + } + } + + public override bool Contains(Guid documentID) => storage.Contains(documentID); + + public override void Dispose() + { + storage.Dispose(); + cache.Clear(); + cache = null; + } + + public override bool Refresh(Document document) + { + return storage.Refresh(document); + } + + public override bool IsCaching => true; + } +} diff --git a/ng/storage/fs/FSStorageContainer.cs b/ng/storage/fs/FSStorageContainer.cs new file mode 100644 index 0000000..998ca06 --- /dev/null +++ b/ng/storage/fs/FSStorageContainer.cs @@ -0,0 +1,145 @@ +// /** +// * File: FSStorage.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. +// * +// * +// **/ +using System; +using System.Collections.Generic; +using System.IO; +using System.Diagnostics; +using ln.logging; +using ln.types.odb.ng.storage.cache; + +namespace ln.types.odb.ng.storage.fs +{ + public class FSStorageContainer : IStorageContainer,IDisposable + { + public string BasePath { get; } + public int DefaultCacheSize { get; set; } + + FileStream lockFile; + Dictionary storages = new Dictionary(); + + public FSStorageContainer(string basePath) + { + BasePath = basePath; + } + public bool IsOpen => lockFile != null; + + private void AssertOpen() + { + if (!IsOpen) + throw new IOException("FSStorage not open"); + } + + public void Close() + { + lock (this) + { + AssertOpen(); + + foreach (IStorage storage in storages.Values) + { + if (storage.IsOpen) + storage.Close(); + storage.Dispose(); + } + + if (lockFile != null) + { + lockFile.Close(); + lockFile.Dispose(); + lockFile = null; + } + } + } + + public IStorage GetStorage(string storageName) + { + lock (this) + { + AssertOpen(); + + if (!storages.ContainsKey(storageName)) + { + IStorage storage = new SegmentedFileStorage(Path.Combine(BasePath, storageName)); + + if (DefaultCacheSize > 0) + storage = new CachingStorage(storage) { MaxCacheSize = DefaultCacheSize, }; + + storages.Add(storageName, storage); + } + if (!storages[storageName].IsOpen) + storages[storageName].Open(); + + return storages[storageName]; + } + } + + public IEnumerable GetStorageNames() + { + lock (this) + { + AssertOpen(); + return storages.Keys; + } + } + + public IStorageContainer Open() + { + lock (this) + { + if (!IsOpen) + { + if (!Directory.Exists(BasePath)) + Directory.CreateDirectory(BasePath); + + try + { + lockFile = new FileStream(Path.Combine(BasePath, ".lock"), FileMode.CreateNew, FileAccess.ReadWrite, FileShare.Read, 1024, FileOptions.DeleteOnClose); + lockFile.WriteInteger(Process.GetCurrentProcess().Id); + lockFile.Flush(); + } catch (IOException) + { + if (File.Exists(Path.Combine(BasePath, ".lock"))) + { + lockFile = new FileStream(Path.Combine(BasePath, ".lock"), FileMode.Open, 
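+                        // Stale-lock recovery: CreateNew failed, so another process may hold
+                        // this container. The PID recorded in ".lock" is read back below; if
+                        // that process no longer exists (or has exited), the lock is treated
+                        // as stale, logged, and truncated so this process can take over.
+                        // Example (PID assumed): a crash leaves ".lock" containing 4242;
+                        // Process.GetProcessById(4242) then throws, and the container opens.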
FileAccess.Read, FileShare.Read); + int lockPID = lockFile.ReadInteger(); + lockFile.Close(); + + Process lockProcess = null; + + try + { + lockProcess = Process.GetProcessById(lockPID); + } catch (Exception) + { + } + + if ((lockProcess != null) && !lockProcess.HasExited) + throw; + + Logging.Log(LogLevel.DEBUG, "FSStorageContainer: Ignoring stale lock file: PID:{0}@{1}", lockPID,Path.Combine(BasePath, ".lock")); + + lockFile = new FileStream(Path.Combine(BasePath, ".lock"), FileMode.Truncate, FileAccess.ReadWrite, FileShare.Read, 1024, FileOptions.DeleteOnClose); + lockFile.WriteInteger(Process.GetCurrentProcess().Id); + lockFile.Flush(); + } + } + } + } + return this; + } + + public void Dispose() + { + if (IsOpen) + Close(); + storages.Clear(); + } + } +} diff --git a/ng/storage/fs/SegmentedFileStorage.cs b/ng/storage/fs/SegmentedFileStorage.cs new file mode 100644 index 0000000..bbace02 --- /dev/null +++ b/ng/storage/fs/SegmentedFileStorage.cs @@ -0,0 +1,375 @@ +// /** +// * File: FSSTorage.cs +// * Author: haraldwolff +// * +// * This file and it's content is copyrighted by the Author and / or copyright holder. +// * Any use wihtout proper permission is illegal and may lead to legal actions. +// * +// * +// **/ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using ln.logging; +using ln.types.odb.ng.index; +using ln.types.odb.ng.storage.bases; +using ln.types.btree; +using ln.types.threads; +using ln.objects.catalog; + +namespace ln.types.odb.ng.storage +{ + /** + * FSStorage + * + * Directory Layout: + * + * / + * /data.odb Serialized Document Data + * /data.idx Serialized Lookup Index for Documents and Free Areas + * + * data.odb + * ---------- + * 0000 4 MAGIC Bytes + * 0004 4 Version + * 0008 8 LastCloseTimestamp + * 0010 4 FirstOffset + * 0014 4 GranularWidth + * 0018 8 Reserved 0 + * + **/ + public class SegmentedFileStorage : StorageBase + { + public String StoragePath { get; } + public String DataFileName => System.IO.Path.Combine(StoragePath, "data.odb"); + + public override bool IsCaching => false; + public bool AutoFlush { get; set; } = true; + + SegmentedFile segmentedFile; + + MappingBTree unusedSegments = new MappingBTree((s)=>s.Offset); + MappingBTree usedSegments = new MappingBTree((s)=>s.ID); + + IndexPath.DocumentPath indexRoot = new IndexPath.DocumentPath(); + + + public SegmentedFileStorage(string storagePath) + { + StoragePath = storagePath; + } + + private void AssertOpen() + { + if (!IsOpen) + throw new IOException("Not open"); + } + + public override bool IsOpen => ((segmentedFile != null) && segmentedFile.IsOpen); + + public override bool Open() + { + if (!IsOpen) + { + try + { + if (!Directory.Exists(StoragePath)) + Directory.CreateDirectory(StoragePath); + + segmentedFile = new SegmentedFile(DataFileName); + segmentedFile.Open(); + + foreach (SegmentedFile.Segment segment in segmentedFile.Segments) + { + if (Guid.Empty.Equals(segment.ID)) + { + unusedSegments.Add(segment); + } + else + { + if (usedSegments.TryGet(segment.ID, out SegmentedFile.Segment existing)) + { + if (existing.TimeStamp < segment.TimeStamp) + { + existing.ID = Guid.Empty; + segmentedFile.Write(existing, new byte[0]); + + usedSegments.RemoveKey(existing.ID); + unusedSegments.Add(existing); + } + else + { + segment.ID = Guid.Empty; + segmentedFile.Write(segment, new byte[0]); + unusedSegments.Add(segment); + } + } + else + { + usedSegments.Add(segment); + } + } + } + + if (File.Exists(System.IO.Path.Combine(StoragePath, "indeces.lst"))) + { + bool 
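+                    // Index persistence: "indeces.lst" holds the list of indexed paths as an
+                    // ODBList; each path is re-created under indexRoot and its persisted data
+                    // loaded, and a full rebuild is triggered below whenever an index file is
+                    // missing or older than the data file. A usage sketch (path value and
+                    // predicate are assumptions):
+                    //
+                    //   storage.EnsureIndex("Name");
+                    //   var ids = storage.GetDocumentIDs("Name", v => !ODBNull.Instance.Equals(v));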
needsRebuild = false; + + using (FileStream indexLst = new FileStream(System.IO.Path.Combine(StoragePath, "indeces.lst"), FileMode.Open)) + { + byte[] indexLstBytes = indexLst.ReadBytes((int)indexLst.Length); + ODBList idxList = new ODBList(indexLstBytes, 0, indexLstBytes.Length); + + foreach (ODBEntity indexName in idxList) + { + indexRoot.Ensure(IndexPath.SplitPath(indexName.As())); + } + } + + foreach (Index index in indexRoot.GetIndeces()) + { + if (!index.LoadIndex(StoragePath, segmentedFile.LastCloseTimestamp)) + needsRebuild = true; + } + + if (needsRebuild) + RebuildIndeces(); + } + return true; + } catch (Exception) + { + segmentedFile?.Close(); + segmentedFile = null; + + usedSegments.Clear(); + unusedSegments.Clear(); + + throw; + } + } + return false; + } + + public override void Close() + { + lock (this){ + AssertOpen(); + + segmentedFile.Close(); + + List indexNames = new List(); + + foreach (Index index in indexRoot.GetIndeces()) + { + indexNames.Add(index.IndexName); + index.SaveIndex(StoragePath, segmentedFile.LastCloseTimestamp); + } + + ODBList indexList = new ODBList(); + indexList.AddRange(indexNames.Select((x) => ODBEntity.FromNative(x))); + + FileStream indexLst = new FileStream(System.IO.Path.Combine(StoragePath, "indeces.lst"), FileMode.Create); + indexLst.WriteBytes(indexList.GetStorageBytes()); + indexLst.Close(); + indexLst.Dispose(); + } + } + + public void Sync() + { + lock (this) + { + segmentedFile.Sync(); + } + } + + public override IEnumerable GetDocumentIDs() + { + lock (this) + { + return usedSegments.Keys.ToArray(); + } + } + + public override Document Load(Guid documentID) + { + lock (this) + { + if (!usedSegments.TryGet(documentID,out SegmentedFile.Segment segment)) + throw new KeyNotFoundException(); + + return LoadDocument(segment); + } + } + + public override bool Contains(Guid documentID) + { + lock (this) + { + return usedSegments.ContainsKey(documentID); + } + } + + private Document LoadDocument(SegmentedFile.Segment segment) + { + byte[] storageBytes = segmentedFile.Read(segment); + + try + { + return new Document(storageBytes) { StorageTimeStamp = segment.TimeStamp, }; + } catch (Exception e) + { + Logging.Log(LogLevel.DEBUG, "Exception while Deserializing Document from FSStorage: {1} ID={0}",segment.ID,StoragePath); + Logging.Log(LogLevel.DEBUG, "StorageArea: {0}", segment); + Logging.Log(e); + throw; + } + } + + public override void Save(Document document) + { + lock (this) + { + byte[] storageBytes = document.GetStorageBytes(); + + SegmentedFile.Segment segment = PopUnusedSegment(storageBytes.Length); + if (segment == null) + { + segment = segmentedFile.Append(document.ID,storageBytes); + } + else + { + segment.ID = document.ID; + segmentedFile.Write(segment,storageBytes); + } + + indexRoot.Replace(document.ID, document); + + if (usedSegments.TryGet(document.ID,out SegmentedFile.Segment previousSegment)) + { + usedSegments.RemoveKey(document.ID); + + previousSegment.ID = Guid.Empty; + segmentedFile.Write(previousSegment,new byte[0]); + + PushUnusedSegment(previousSegment); + } + document.StorageTimeStamp = segment.TimeStamp; + usedSegments.Add(segment); + + if (AutoFlush) + segmentedFile.Sync(); + } + } + + public override void Delete(Guid documentID) + { + lock (this) + { + if (usedSegments.TryGet(documentID, out SegmentedFile.Segment segment)) + { + usedSegments.RemoveKey(documentID); + + segment.ID = Guid.Empty; + segmentedFile.Write(segment, new byte[0]); + + indexRoot.Remove(documentID); + + PushUnusedSegment(segment); + + if 
(AutoFlush) + segmentedFile.Sync(); + } + } + } + + public override bool Refresh(Document document) + { + Load(document.ID).CloneTo(document); + return true; + } + + + + private SegmentedFile.Segment PopUnusedSegment(int payloadSize) + { + foreach (SegmentedFile.Segment segment in unusedSegments) + { + if (segment.PayloadSize >= payloadSize) + { + unusedSegments.Remove(segment); + return segment; + } + } + return null; + } + + private void PushUnusedSegment(SegmentedFile.Segment segment) + { + unusedSegments.Add(segment); + } + + public override DateTime GetStorageTimestamp(Guid documentID) + { + if (usedSegments.ContainsKey(documentID)) + return usedSegments[documentID].TimeStamp; + return default(DateTime); + } + + public override IEnumerable GetDocumentIDs(string path, Predicate predicate) + { + lock (this) + { + index.Path p = index.IndexPath.SplitPath(path); + if (indexRoot.Indexed(p)) + { + return indexRoot.GetDocumentIDs(p, predicate); + } + else + { + HashSet documentIDs = new HashSet(); + + IEnumerable ids = GetDocumentIDs(); + + foreach (Guid documentID in ids) + { + Document document = Load(documentID); + if (predicate(document[path])) + documentIDs.Add(documentID); + } + + return documentIDs; + } + } + } + + public override void EnsureIndex(params string[] paths) + { + lock (this) + { + bool needsRebuild = false; + + foreach (String path in paths) + { + if (indexRoot.Ensure(IndexPath.SplitPath(path))) + needsRebuild = true; + } + + if (needsRebuild) + RebuildIndeces(); + } + } + + public void RebuildIndeces() + { + Logging.Log(LogLevel.INFO, "FSStorage: RebuildIndeces()"); + foreach (Guid documentID in GetDocumentIDs()) + { + Document document = Load(documentID); + indexRoot.Replace(documentID, document); + } + } + + } +} diff --git a/ng/storage/session/SessionStorage.cs b/ng/storage/session/SessionStorage.cs new file mode 100644 index 0000000..573625b --- /dev/null +++ b/ng/storage/session/SessionStorage.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using ln.types.odb.values; +using System.Linq; +using ln.types.odb.ng.storage.bases; +using ln.types.threads; +using ln.types.odb.ng.diff; +using ln.types.collections; +namespace ln.types.odb.ng.storage.session +{ + public class SessionStorage : ChainedStorage + { + public SessionStorageContainer SessionContainer { get; } + + WeakKeyDictionary cache = new WeakKeyDictionary(); + + public SessionStorage(SessionStorageContainer session, IStorage storage) + :base(storage) + { + SessionContainer = session; + } + + public override Document Load(Guid documentID) + { + lock (this) + { + if (!Storage.Contains(documentID)) + throw new KeyNotFoundException(); + + Document keyDocument = new Document(documentID); + + if (cache.ContainsKey(keyDocument)) + return cache.GetKeyInstance(keyDocument); + + Document document = Storage.Load(documentID); + Document cacheDocument = document.Clone() as Document; + + cache.Add(document, cacheDocument); + + return document; + } + } + + public override void Save(Document document) + { + lock (this) + { + if (cache.ContainsKey(document)) + { + using (Storage.Lock()) + { + Document cacheDocument = cache[document]; + Document storageDocument = Storage.Load(document.ID); + + DocumentDiff documentDiff = new DocumentDiff(cacheDocument, document); + documentDiff.Apply(storageDocument); + + Storage.Save(storageDocument); + cache[document] = document.Clone() as Document; + } + } + else + { + Storage.Save(document); + cache.Add(document, document.Clone() as Document); + } + } + } + + public 
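+        // Session semantics: Load() keeps a pristine clone of every document it hands
+        // out; Save() computes a DocumentDiff between that clone and the edited
+        // instance and applies only the delta to a freshly loaded storage copy, under
+        // the storage lock. Sketch (field names assumed): if session A changed "Name"
+        // and session B changed "Mail" on the same document, both diffs apply cleanly
+        // and neither session overwrites the other's field.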
override void Delete(Guid documentID) + { + lock (this) + { + Document keyDocument = new Document(documentID); + + Storage.Delete(documentID); + cache.Remove(keyDocument); + } + } + + public override bool IsCaching => true; + + } +} diff --git a/ng/storage/session/SessionStorageContainer.cs b/ng/storage/session/SessionStorageContainer.cs new file mode 100644 index 0000000..2f670cc --- /dev/null +++ b/ng/storage/session/SessionStorageContainer.cs @@ -0,0 +1,67 @@ +using System; +using System.Collections.Generic; +using ln.types.odb.values; +using ln.logging; +using System.Linq; +namespace ln.types.odb.ng.storage.session +{ + public enum SessionSynchronisationStrategy + { + BIDIRECTIONAL, + FORWARDONLY, + REJECTCHANGED + } + + public class SessionStorageContainer : IStorageContainer + { + public IStorageContainer StorageContainer { get; private set; } + public Mapper ODBMapper { get; private set; } + + public SessionSynchronisationStrategy SynchronisationStrategy { get; set; } = SessionSynchronisationStrategy.REJECTCHANGED; + + public bool IsOpen => StorageContainer.IsOpen; + + public SessionStorageContainer(IStorageContainer storageContainer) + { + StorageContainer = storageContainer; + ODBMapper = Mapper.Default; + } + + private Dictionary storages = new Dictionary(); + public IStorage GetStorage(string storageName) + { + if (storages.ContainsKey(storageName)) + return storages[storageName]; + + IStorage storage = StorageContainer.GetStorage(storageName); + storages.Add(storageName, new SessionStorage(this,storage)); + + if (!storage.IsOpen) + storage.Open(); + + return storages[storageName]; + } + + public IStorageContainer Open() + { + StorageContainer.Open(); + return this; + } + + public void Close() + { + StorageContainer.Close(); + } + + public IEnumerable GetStorageNames() + { + return StorageContainer.GetStorageNames(); + } + + public void Dispose() + { + } + + + } +} diff --git a/query/Criterion.cs b/query/Criterion.cs new file mode 100644 index 0000000..3be3c93 --- /dev/null +++ b/query/Criterion.cs @@ -0,0 +1,11 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace ln.objects.query +{ + public abstract class Criterion + { + public abstract bool Test(object reference, object value); + } +} diff --git a/query/EqualsCriterion.cs b/query/EqualsCriterion.cs new file mode 100644 index 0000000..ae55f25 --- /dev/null +++ b/query/EqualsCriterion.cs @@ -0,0 +1,13 @@ +using System; +using System.Collections.Generic; +using System.Text; + +namespace ln.objects.query +{ + public class EqualsCriterion : Criterion + { + + public override bool Test(object reference, object value) => object.Equals(reference, value); + + } +} diff --git a/serialization/Deserializer.cs b/serialization/Deserializer.cs new file mode 100644 index 0000000..4ad7900 --- /dev/null +++ b/serialization/Deserializer.cs @@ -0,0 +1,39 @@ +using ln.type; +using System; +using System.Collections.Generic; + +namespace ln.objects.serialization +{ + public delegate bool LookupObjectByReference(object reference, Type objectType, out object o); + + public abstract class Deserializer + { + public event LookupObjectByReference OnLookupObjectByReference; + + public abstract bool DeserializeObject(byte[] serializedBytes, ref object o); + + public bool TryLookupObject(object reference, Type targetType, out object o) + { + foreach (LookupObjectByReference lo in OnLookupObjectByReference?.GetInvocationList() ?? 
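+            // The handlers are probed one at a time until one resolves the reference;
+            // delegates with `out` parameters cannot be invoked as a single multicast
+            // call, hence the explicit loop over GetInvocationList(). A subscription
+            // sketch (the cache name is an assumption):
+            //
+            //   deserializer.OnLookupObjectByReference +=
+            //       (object reference, Type t, out object obj) =>
+            //       {
+            //           obj = myCache.Lookup((Guid)reference);
+            //           return obj != null;
+            //       };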
new LookupObjectByReference[0]) + { + if (lo(reference, targetType, out o)) + return true; + } + o = null; + return false; + } + + public virtual bool TryGetType(byte[] serializedBytes,out Type type) + { + object o = null; + if (DeserializeObject(serializedBytes,ref o) && (!object.ReferenceEquals(null,o))) + { + type = o.GetType(); + return true; + } + type = null; + return false; + } + + } +} diff --git a/serialization/Serializer.cs b/serialization/Serializer.cs new file mode 100644 index 0000000..216dd04 --- /dev/null +++ b/serialization/Serializer.cs @@ -0,0 +1,40 @@ +using ln.objects.catalog; +using System; +using System.Collections.Generic; +using System.Text; +using System.Xml; + +namespace ln.objects.serialization +{ + public delegate bool LookupReference(object value, out object reference); + + public delegate bool MangleValue(Type expectedType, ref object value); + + public abstract class Serializer + { + public event LookupReference OnLookupReference; + public event MangleValue OnMangleValue; + + public abstract bool SerializeObject(object o, out byte[] serializedBytes); + + public bool TryLookupReference(object o, out object reference) + { + foreach (LookupReference lr in OnLookupReference?.GetInvocationList() ?? new LookupReference[0]) + { + if (lr(o, out reference)) + return true; + } + reference = null; + return false; + } + + public bool MangleValue(Type expectedType,ref object value) + { + bool mangled = false; + foreach (MangleValue mv in OnMangleValue?.GetInvocationList() ?? new MangleValue[0]) + if (mv(expectedType, ref value)) + mangled = true; + return mangled; + } + } +} diff --git a/serialization/binary/BinaryDeserializer.cs b/serialization/binary/BinaryDeserializer.cs new file mode 100644 index 0000000..668134d --- /dev/null +++ b/serialization/binary/BinaryDeserializer.cs @@ -0,0 +1,246 @@ +using ln.type; +using System; +using System.IO; +using System.Reflection; +using System.Runtime.Serialization; +using System.Text; + +namespace ln.objects.serialization.binary +{ + public class BinaryDeserializer : Deserializer + { + public override bool DeserializeObject(byte[] serializedBytes, ref object o) + { + MemoryStream ms = new MemoryStream(serializedBytes); + if (!Object.ReferenceEquals(null,o) && (serializedBytes[0] == 'S')) + { + ms.ReadByte(); + String typeName = (string)Deserialize(ms); + DeserializeStructured(ms, o.GetType(), o); + } + else + { + o = Deserialize(ms); + } + return true; + } + + public override bool TryGetType(byte[] serializedBytes, out Type type) => TryGetType(new MemoryStream(serializedBytes), out type); + public bool TryGetType(Stream stream, out Type type) + { + int tc; + + switch (tc = stream.ReadByte()) + { + case -1: + throw new EndOfStreamException(); + case '0': + type = null; + break; + case 'P': + return TryGetPrimitiveType(stream, out type); + case 'R': + case 'S': + case 'E': + type = Type.GetType(Deserialize(stream) as string); + break; + case 'A': + type = Type.GetType(Deserialize(stream) as string).MakeArrayType(); + break; + case 'B': + type = typeof(byte[]); + break; + case 'G': + type = typeof(Guid); + break; + default: + type = null; + return false; + } + return true; + } + public bool TryGetPrimitiveType(Stream stream, out Type type) + { + int tc; + + switch (tc = stream.ReadByte()) + { + case -1: + throw new EndOfStreamException(); + case 'I': + type = typeof(int); + break; + case 'i': + type = typeof(uint); + break; + case 'B': + type = typeof(byte); + break; + case 'C': + type = typeof(char); + break; + case 'S': + type = 
typeof(short); + break; + case 's': + type = typeof(ushort); + break; + case 'L': + type = typeof(long); + break; + case 'l': + type = typeof(ulong); + break; + case 'F': + type = typeof(float); + break; + case 'D': + type = typeof(double); + break; + case 'b': + type = typeof(bool); + break; + case 'T': + type = typeof(string); + break; + default: + type = null; + return false; + } + return true; + + } + + object Deserialize(Stream stream) + { + int tc; + + switch (tc = stream.ReadByte()) + { + case -1: + throw new EndOfStreamException(); + case '0': + return null; + case 'P': + return DeserializePrimitive(stream); + case 'R': + return DeserializeReference(stream); + case 'S': + return DeserializeStructured(stream); + case 'E': + return DeserializeEnum(stream); + case 'A': + return DeserializeArray(stream); + case 'B': + return DeserializeByteArray(stream); + case 'G': + return DeserializeGuid(stream); + default: + throw new NotSupportedException(String.Format("Unsupported type code: {0}", (char)tc)); + } + } + + + object DeserializePrimitive(Stream stream) + { + int tc; + + switch (tc = stream.ReadByte()) + { + case -1: + throw new EndOfStreamException(); + case 'I': + return stream.ReadInteger(); + case 'i': + return stream.ReadUInteger(); + case 'B': + return (byte)stream.ReadByte(); + case 'C': + return (char)stream.ReadShort(); + case 'S': + return stream.ReadShort(); + case 's': + return stream.ReadUShort(); + case 'L': + return stream.ReadLong(); + case 'l': + return stream.ReadULong(); + case 'F': + return stream.ReadFloat(); + case 'D': + return stream.ReadDouble(); + case 'b': + return stream.ReadByte() != 0; + case 'T': + int tl = stream.ReadInteger(); + byte[] tbytes = stream.ReadBytes(tl); + return Encoding.UTF8.GetString(tbytes); + default: + throw new NotSupportedException(String.Format("Unsupported primitive type code: {0}", (char)tc)); + } + } + + object DeserializeGuid(Stream stream) => new Guid(stream.ReadBytes(16)); + + object DeserializeEnum(Stream stream) + { + Type eType = Type.GetType(Deserialize(stream) as string); + object value = Deserialize(stream); + if (value is int nvalue) + return Enum.ToObject(eType, nvalue); + else if (value is string svalue) + return Enum.Parse(eType, svalue); + else + throw new NotSupportedException("Unsupported ENUM repressentation found"); + } + + object DeserializeByteArray(Stream stream) + { + int length = stream.ReadInteger(); + return stream.ReadBytes(length); + } + + object DeserializeArray(Stream stream) + { + Type etype = Type.GetType(Deserialize(stream) as string); + int length = stream.ReadInteger(); + Array array = Array.CreateInstance(etype, length); + + for (int n = 0; n < array.Length; n++) + array.SetValue(Deserialize(stream), n); + + return array; + } + + object DeserializeStructured(Stream stream) + { + String typeName = (string)Deserialize(stream); + Type type = Type.GetType(typeName); + object o = type.IsValueType ? 
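+            // Value types are created with their default instance; reference types go
+            // through Activator with nonPublic: true, so a private parameterless
+            // constructor suffices (the fields are overwritten from the stream right
+            // after). Types without any parameterless constructor would throw here;
+            // FormatterServices.GetUninitializedObject would be the constructor-free
+            // alternative.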
Activator.CreateInstance(type) : Activator.CreateInstance(type, true); + DeserializeStructured(stream, type, o); + return o; + } + void DeserializeStructured(Stream stream, Type type, object o) + { + int nFields = stream.ReadInteger(); + + for (int n=0;n() != null) + SerializePrimitive(stream, (int)value); + else + SerializePrimitive(stream, value.ToString()); + } + + void SerializeArray(Stream stream, object value) + { + if (value is byte[] ba) + { + stream.WriteByte('B'); + stream.WriteInteger(ba.Length); + stream.WriteBytes(ba); + } + else + { + Array array = (Array)value; + + stream.WriteByte('A'); + SerializePrimitive(stream, array.GetType().GetElementType().GetSimpleQualifiedName()); + stream.WriteInteger(array.Length); + + for (int n = 0; n < array.Length; n++) + Serialize(stream, array.GetValue(n)); + } + } + + void SerializeValue(Stream stream, object value) + { + Type valueType = value.GetType(); + + if (value is Guid guid) + { + stream.WriteByte('G'); + stream.WriteBytes(guid.ToByteArray()); + } + else + SerializeStructured(stream, value); + } + + void SerializeStructured(Stream stream, object value) + { + Type type = value.GetType(); + FieldInfo[] fields = type.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + + stream.WriteByte('S'); + SerializePrimitive(stream, type.GetSimpleQualifiedName()); + stream.WriteInteger(fields.Length); + + foreach (FieldInfo fieldInfo in fields) + { + object v = fieldInfo.GetValue(value); + + if (MangleValue(fieldInfo.FieldType, ref v)) + fieldInfo.SetValue(value, v); + + SerializePrimitive(stream, fieldInfo.Name); + Serialize(stream, v); + } + } + + + } +} diff --git a/serialization/json/JSONDeserializer.cs b/serialization/json/JSONDeserializer.cs new file mode 100644 index 0000000..d51d2c9 --- /dev/null +++ b/serialization/json/JSONDeserializer.cs @@ -0,0 +1,75 @@ +using ln.json; +using ln.json.mapping; +using ln.logging; +using System; +using System.Collections.Generic; +using System.Reflection; +using System.Text; + +namespace ln.objects.serialization.json +{ + public class JSONDeserializer : Deserializer + { + ObjectStore ObjectStore; + public JSONMapper Mapper { get; } + + public JSONDeserializer(ObjectStore objectStore) + { + ObjectStore = objectStore; + Mapper = new JSONMapper() { DefaultBindingFlags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, DefaultMappingFlags = JSONObjectMappingFlags.FIELDS }; + Mapper.OnRequestCustomUnserialization += Mapper_OnRequestCustomUnserialization; + + Mapper.AddMappingFactory( + typeof(IList<>), + (Type targetType, out JSONMapping mapping) => + { + mapping = (JSONMapping)Activator.CreateInstance(typeof(LazyListMapping<>).MakeGenericType(targetType.GetGenericArguments()),objectStore); + return false; + }); + + } + + private bool Mapper_OnRequestCustomUnserialization(JSONValue json, Type targetType, out object o) + { + if (!(json is JSONNull) && !targetType.IsValueType && !targetType.IsInterface && Mapper.GetOrBuildMapping(targetType, out JSONMapping mapping) && mapping is JSONObjectMapping) + { + Guid uid = new Guid(Convert.FromBase64String((json as JSONString).Value)); + return TryLookupObject(uid, targetType, out o); + } + o = null; + return false; + } + + public override bool DeserializeObject(byte[] serializedBytes, ref object o) + { + throw new NotImplementedException(); + /* + if (ObjectStore.DEBUG) + Logging.Log(LogLevel.DEBUG, "Deserializing: {0}", Encoding.UTF8.GetString(serializedBytes)); + + if (serializedBytes.Length == 0) + { + o = null; 
+ return true; + } + + if (Mapper.GetOrBuildMapping(targetType, out JSONMapping mapping)) + { + JSONValue json = JSONParser.Parse(Encoding.UTF8.GetString(serializedBytes)); + if (mapping is JSONObjectMapping objectMapping) + { + objectMapping.Apply(Mapper, json as JSONObject, o); + } + else + { + o = mapping.FromJson(Mapper, json); + } + + return true; + } + + throw new NotSupportedException(); + */ + } + } +} diff --git a/serialization/json/JSONSerializer.cs b/serialization/json/JSONSerializer.cs new file mode 100644 index 0000000..a97c11b --- /dev/null +++ b/serialization/json/JSONSerializer.cs @@ -0,0 +1,67 @@ +using ln.json; +using ln.json.mapping; +using ln.logging; +using System; +using System.Text; +using System.Reflection; +using System.Collections.Generic; + +namespace ln.objects.serialization.json +{ + public class JSONSerializer : Serializer + { + public JSONMapper Mapper { get; } + public ObjectStore ObjectStore { get; } + + public JSONSerializer(ObjectStore objectStore) + { + ObjectStore = objectStore; + Mapper = new JSONMapper() { DefaultBindingFlags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, DefaultMappingFlags = JSONObjectMappingFlags.FIELDS }; + Mapper.OnRequestCustomSerialization += Mapper_OnRequestCustomSerialization; + + Mapper.AddMappingFactory( + typeof(IList<>), + (Type targetType, out JSONMapping mapping) => + { + mapping = (JSONMapping)Activator.CreateInstance(typeof(LazyListMapping<>).MakeGenericType(targetType.GetGenericArguments()), objectStore); + return false; + }); + + } + + private bool Mapper_OnRequestCustomSerialization(object o, out JSONValue json) + { + if (!ReferenceEquals(null, o) && !o.GetType().IsValueType && !o.GetType().IsInterface && Mapper.GetOrBuildMapping(o.GetType(), out JSONMapping mapping) && mapping is JSONObjectMapping) + { + if (TryLookupReference(o, out object reference)) + { + json = new JSONString(Convert.ToBase64String(((Guid)reference).ToByteArray())); + return true; + } + } + json = null; + return false; + } + + public override bool SerializeObject(object o, out byte[] serializedBytes) + { + if (ReferenceEquals(null, o)) + { + serializedBytes = new byte[0]; + return true; + } + + if (Mapper.GetOrBuildMapping(o.GetType(), out JSONMapping mapping)) + { + JSONValue json = mapping.ToJson(Mapper, o); + serializedBytes = Encoding.UTF8.GetBytes(json.ToString()); + if (ObjectStore.DEBUG) + Logging.Log(LogLevel.DEBUG, "Serialized: {0}", json.ToString()); + + return true; + } + + throw new NotSupportedException(); + } + } +} diff --git a/serialization/json/LazyListMapping.cs b/serialization/json/LazyListMapping.cs new file mode 100644 index 0000000..8bb892d --- /dev/null +++ b/serialization/json/LazyListMapping.cs @@ -0,0 +1,51 @@ +using ln.json; +using ln.json.mapping; +using ln.objects.collections; +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using System.Text; + +namespace ln.objects.serialization.json +{ + public class LazyListMapping : JSONMapping where T:class + { + ObjectStore ObjectStore; + Type ElementType; + + public LazyListMapping(ObjectStore objectStore) + :base(typeof(IList<>)) + { + ObjectStore = objectStore; + ElementType = typeof(T); + } + + public override JSONValue ToJson(JSONMapper mapper, object value) + { + if (!(value is IList ilist)) + throw new ArgumentException(nameof(value)); + JSONArray jsonArray = new JSONArray(); + + foreach (T item in ilist) + if (mapper.Serialize(item, out JSONValue jsonItem)) + jsonArray.Add(jsonItem); + else + throw new 
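+                    // Elements of a lazy list serialize through the mapper's custom
+                    // serialization hook, which (see JSONSerializer above) emits the
+                    // object's reference as a Base64 GUID string rather than inlining the
+                    // object. A list of two referenced items therefore round-trips as
+                    // something like ["q7f3...", "mA91..."] (values illustrative), and
+                    // FromJson below rebuilds a LazyList that resolves each reference on
+                    // first access.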
SerializationException(); + + return jsonArray; + } + + public override object FromJson(JSONMapper mapper, JSONValue json) + { + IList ilist = new LazyList(ObjectStore); + + JSONArray jsonArray = json as JSONArray; + + foreach (JSONValue item in jsonArray.Children) + ilist.Add((T)mapper.FromJson(item, ElementType)); + + return ilist; + } + + } +} diff --git a/storage/BinaryObject.cs b/storage/BinaryObject.cs new file mode 100644 index 0000000..1aae37c --- /dev/null +++ b/storage/BinaryObject.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections.Generic; +using System.Dynamic; +using System.Text; + +namespace ln.objects.storage +{ + public class BinaryObject + { + public Guid UID { get; set; } + public int Version { get; set; } + + public long Offset { get; set; } + public int PhySize { get; set; } + public int LogSize { get; set; } + + public BinaryObject() { } + public BinaryObject(long offset,int phySize) + { + Offset = offset; + PhySize = phySize; + } + } +} diff --git a/storage/BinaryObjectCache.cs b/storage/BinaryObjectCache.cs new file mode 100644 index 0000000..7353502 --- /dev/null +++ b/storage/BinaryObjectCache.cs @@ -0,0 +1,82 @@ +using ln.collections; +using System; +using System.Collections.Generic; +using System.Text; + +namespace ln.objects.storage +{ + class BinaryObjectCache + { + BTree> firstLevel = new BTree>(); + + + public BinaryObjectCache() { } + + BTree GetSecondLevel(Guid uid) + { + if (!firstLevel.TryGet(uid,out BTree secondLevel)) + { + secondLevel = new BTree(); + firstLevel.Add(uid, secondLevel); + } + return secondLevel; + } + bool TryGetSecondLevel(Guid uid, out BTree secondLevel) => firstLevel.TryGet(uid, out secondLevel) && !secondLevel.Empty; + + public void Add(BinaryObject binaryObject) + { + BTree secondLevel = GetSecondLevel(binaryObject.UID); + secondLevel.Add(binaryObject.Version, binaryObject); + } + public void Remove(BinaryObject binaryObject) + { + BTree secondLevel = GetSecondLevel(binaryObject.UID); + secondLevel.Remove(binaryObject.Version); + } + + public bool Contains(Guid uid) => GetSecondLevel(uid).ContainsKey(uid); + public IEnumerable UIDs => firstLevel.Keys; + + public IEnumerable GetBinaryObjects(Guid uid) => GetSecondLevel(uid).Values; + public BinaryObject GetLatestBinaryObject(Guid uid) + { + BTree secondLevel = GetSecondLevel(uid); + return secondLevel.LastValue(); + } + public BinaryObject GetBinaryObject(Guid uid, int version) => GetSecondLevel(uid)[version]; + + public bool TryGetBinaryObjects(Guid uid,out IEnumerable binaryObjects) + { + if (!TryGetSecondLevel(uid,out BTree secondLevel)) + { + binaryObjects = null; + return false; + } + + binaryObjects = secondLevel.Values; + return true; + } + public bool TryGetBinaryObject(Guid uid, int version, out BinaryObject binaryObject) + { + if (TryGetSecondLevel(uid, out BTree secondLevel)) + { + if (version == -1) + { + if (secondLevel.TryGetLastValue(out binaryObject)) + return true; + } + else + { + if (secondLevel.TryGet(version, out binaryObject)) + return true; + } + } + binaryObject = null; + return false; + } + + public bool TryGetLatestBinaryObject(Guid uid, out BinaryObject binaryObject) => GetSecondLevel(uid).TryGetLastValue(out binaryObject); + public bool TryGetFirstBinaryObject(Guid uid, out BinaryObject binaryObject) => GetSecondLevel(uid).TryGetFirstValue(out binaryObject); + + } +} diff --git a/storage/BinaryObjectFile.cs b/storage/BinaryObjectFile.cs new file mode 100644 index 0000000..840216e --- /dev/null +++ b/storage/BinaryObjectFile.cs @@ -0,0 +1,296 @@ 
+using ln.collections; +using ln.type; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; + +/* + * BinaryObjectFile Format + * + * Offset Type Description + * 0x0000 byte[4] Magic Number "BOF0" + * 0x0004 int32 FeatureFlags + * 0x0008 long offset of first binary object + * + * BinaryObject + * Guid UID UID of this object + * int32 Version Version of this object + * int32 phySize size of this binary object + * int32 logSize size of used payload + * int32 reserved Reserved 0 + * byte[phySize] object data + * + */ + + +namespace ln.objects.storage +{ + public enum BOFFeatures : int + { + NONE = 0 + } + public class BinaryObjectFile : IDisposable + { + public readonly byte[] MagicNumber = Encoding.ASCII.GetBytes("BOF0"); + + public string FileName { get; } + public BOFFeatures Features { get; private set; } = BOFFeatures.NONE; + + int granularity = 12; + public int Granularity + { + get => granularity; + set + { + if (granularity < 6) + throw new ArgumentOutOfRangeException(nameof(value), "Granularity must be >= 6"); + granularity = value; + } + } + public int GranularSize => (1 << granularity); + public int GranularMask => (1 << granularity) - 1; + + BinaryObjectCache binaryObjects; + BTree unusedBinaryObjects; + FileStream storageFile; + + long appendOffset; + + public BinaryObjectFile(string filename) + { + FileName = filename; + } + + public void Open() + { + if (!File.Exists(FileName)) + { + Create(); + } else + { + OpenStorage(); + } + } + private void Create() + { + storageFile = new FileStream(FileName, FileMode.CreateNew, FileAccess.ReadWrite); + storageFile.WriteBytes(MagicNumber); + storageFile.WriteInteger((int)Features); + storageFile.WriteLong(4096); + storageFile.Flush(); + + binaryObjects = new BinaryObjectCache(); + unusedBinaryObjects = new BTree(); + + appendOffset = 4096; + } + private void OpenStorage() + { + storageFile = new FileStream(FileName, FileMode.Open, FileAccess.ReadWrite); + binaryObjects = new BinaryObjectCache(); + unusedBinaryObjects = new BTree(); + + if (!storageFile.ReadBytes(MagicNumber.Length).AreEqual(MagicNumber)) + throw new FormatException("Magic Number does not match"); + + Features = (BOFFeatures)storageFile.ReadInteger(); + + long offset = storageFile.ReadLong(); + + while (offset < storageFile.Length) + { + BinaryObject binaryObject = ReadBinaryObject(offset); + if (Guid.Empty.Equals(binaryObject.UID)) + unusedBinaryObjects.Add(binaryObject.Offset, binaryObject); + else + binaryObjects.Add(binaryObject); + + offset += binaryObject.PhySize; + } + + appendOffset = offset; + } + + private BinaryObject ReadBinaryObject(long offset) + { + storageFile.Position = offset; + + BinaryObject binaryObject = new BinaryObject(); + + binaryObject.UID = new Guid(storageFile.ReadBytes(16)); + binaryObject.Version = storageFile.ReadInteger(); + binaryObject.Offset = offset; + binaryObject.PhySize = storageFile.ReadInteger(); + binaryObject.LogSize = storageFile.ReadInteger(); + storageFile.ReadInteger(); + + return binaryObject; + } + private void WriteBinaryObject(BinaryObject binaryObject) + { + storageFile.Position = binaryObject.Offset; + + storageFile.WriteBytes(binaryObject.UID.ToByteArray()); + storageFile.WriteInteger(binaryObject.Version); + storageFile.WriteInteger(binaryObject.PhySize); + storageFile.WriteInteger(binaryObject.LogSize); + storageFile.WriteInteger(0); + } + private void WriteBinaryObject(BinaryObject binaryObject,byte[] data) + { + if (data.Length > (binaryObject.PhySize - 32)) + 
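+                // The 32 bytes reserved per object are the on-disk header written by
+                // WriteBinaryObject/ReadBinaryObject: 16 (UID) + 4 (Version) + 4 (PhySize)
+                // + 4 (LogSize) + 4 (reserved). Example: an object with PhySize = 4096
+                // can carry at most 4064 payload bytes.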
+        private void WriteBinaryObject(BinaryObject binaryObject, byte[] data)
+        {
+            if (data.Length > (binaryObject.PhySize - 32))
+                throw new ArgumentException("more physical space needed for data", nameof(data));
+
+            binaryObject.LogSize = data.Length;
+            WriteBinaryObject(binaryObject);
+            storageFile.WriteBytes(data);
+        }
+
+        public IEnumerable<Guid> ObjectUIDs => binaryObjects.UIDs;
+        public IEnumerable<int> GetObjectVersions(Guid uid) => binaryObjects.GetBinaryObjects(uid).Select(bo => bo.Version);
+        public bool Contains(Guid uid) => binaryObjects.Contains(uid);
+
+        public byte[] ReadBinaryObject(Guid uid) => ReadBinaryObject(uid, -1);
+        public byte[] ReadBinaryObject(Guid uid, int version)
+        {
+            lock (this)
+            {
+                BinaryObject binaryObject = (version == -1) ? binaryObjects.GetLatestBinaryObject(uid) : binaryObjects.GetBinaryObject(uid, version);
+                storageFile.Position = binaryObject.Offset + 32;
+                return storageFile.ReadBytes(binaryObject.LogSize);
+            }
+        }
+        public bool TryReadBinaryObject(Guid uid, out byte[] serializedBytes) => TryReadBinaryObject(uid, -1, out serializedBytes);
+        public bool TryReadBinaryObject(Guid uid, int version, out byte[] serializedBytes)
+        {
+            lock (this)
+            {
+                if (binaryObjects.TryGetBinaryObject(uid, version, out BinaryObject binaryObject))
+                {
+                    storageFile.Position = binaryObject.Offset + 32;
+                    serializedBytes = storageFile.ReadBytes(binaryObject.LogSize);
+                    return true;
+                }
+            }
+            serializedBytes = null;
+            return false;
+        }
+
+        public void WriteBinaryObject(Guid uid, byte[] data)
+        {
+            lock (this)
+            {
+                // Reuse a free block if one is large enough, otherwise grow the file.
+                if (!FindUnusedBinaryObject(data.Length + 32, out BinaryObject binaryObject))
+                    if (!AppendBinaryObject(data.Length + 32, out binaryObject))
+                        throw new OutOfMemoryException("could not append to storage");
+
+                if (binaryObjects.TryGetLatestBinaryObject(uid, out BinaryObject latestObject))
+                    binaryObject.Version = latestObject.Version + 1;
+
+                binaryObject.UID = uid;
+                WriteBinaryObject(binaryObject, data);
+                binaryObjects.Add(binaryObject);
+            }
+        }
+
+        public void RemoveBinaryObject(Guid uid, int version)
+        {
+            lock (this)
+            {
+                BinaryObject binaryObject = binaryObjects.GetBinaryObject(uid, version);
+                binaryObjects.Remove(binaryObject);
+                PushUnusedBinaryObject(ref binaryObject);
+                WriteBinaryObject(binaryObject);
+            }
+        }
+        public void RemoveBinaryObjects(Guid uid)
+        {
+            lock (this)
+            {
+                // Materialize the version list first: RemoveBinaryObject() mutates the
+                // collection that GetObjectVersions() enumerates lazily.
+                foreach (int version in GetObjectVersions(uid).ToArray())
+                {
+                    RemoveBinaryObject(uid, version);
+                }
+            }
+        }
+
+        private bool FindUnusedBinaryObject(int minPhySize, out BinaryObject binaryObject)
+        {
+            // Round the requested size up to the next granule boundary,
+            // then take the first free block that fits (first-fit).
+            minPhySize = (minPhySize + GranularMask) & ~GranularMask;
+
+            foreach (BinaryObject candidate in unusedBinaryObjects.Values)
+            {
+                if (candidate.PhySize >= minPhySize)
+                {
+                    unusedBinaryObjects.Remove(candidate.Offset);
+
+                    binaryObject = candidate;
+
+                    // Split off the tail if the block exceeds the request by a full granule.
+                    if ((binaryObject.PhySize - minPhySize) >= GranularSize)
+                    {
+                        BinaryObject splitObject = new BinaryObject(binaryObject.Offset + minPhySize, binaryObject.PhySize - minPhySize);
+                        binaryObject.PhySize = minPhySize;
+                        WriteBinaryObject(splitObject);
+                        unusedBinaryObjects.Add(splitObject.Offset, splitObject);
+                    }
+
+                    return true;
+                }
+            }
+            binaryObject = null;
+            return false;
+        }
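+        // With the default granularity of 12, GranularSize is 4096 and GranularMask
+        // is 4095, so a request for a 100-byte payload (132 bytes including the
+        // 32-byte header) rounds up to a single 4096-byte granule:
+        //   (132 + 4095) & ~4095 == 4096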
+        private void PushUnusedBinaryObject(ref BinaryObject binaryObject)
+        {
+            // Mark the block as free, then coalesce it with adjacent free blocks.
+            binaryObject.UID = Guid.Empty;
+            unusedBinaryObjects.Add(binaryObject.Offset, binaryObject);
+
+            if (unusedBinaryObjects.TryGetPreviousValue(binaryObject.Offset, out BinaryObject previousObject))
+            {
+                if ((previousObject.Offset + previousObject.PhySize) == binaryObject.Offset)
+                {
+                    previousObject.PhySize += binaryObject.PhySize;
+                    unusedBinaryObjects.Remove(binaryObject.Offset);
+                    binaryObject = previousObject;
+                }
+            }
+            if (unusedBinaryObjects.TryGetNextValue(binaryObject.Offset, out BinaryObject nextObject))
+            {
+                if ((binaryObject.Offset + binaryObject.PhySize) == nextObject.Offset)
+                {
+                    binaryObject.PhySize += nextObject.PhySize;
+                    unusedBinaryObjects.Remove(nextObject.Offset);
+                }
+            }
+        }
+
+        private bool AppendBinaryObject(int minPhySize, out BinaryObject binaryObject)
+        {
+            minPhySize = (minPhySize + GranularMask) & ~GranularMask;
+            binaryObject = new BinaryObject(appendOffset, minPhySize);
+            appendOffset += minPhySize;
+            return true;
+        }
+
+        public void Close()
+        {
+            lock (this)
+            {
+                storageFile.Close();
+            }
+        }
+
+        public void Flush() => storageFile.Flush();
+
+        public void Dispose()
+        {
+            storageFile?.Dispose();
+            storageFile = null;
+        }
+    }
+}
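
A minimal usage sketch (illustrative, not part of the patch): the file name and
payloads are invented, and it assumes new objects start at version 0 and that
ReadBinaryObject without an explicit version returns the latest one, as the
code above suggests.

    using System;
    using System.Text;
    using ln.objects.storage;

    class BinaryObjectFileDemo
    {
        static void Main()
        {
            using (BinaryObjectFile bof = new BinaryObjectFile("demo.bof"))
            {
                bof.Open();   // creates demo.bof on first use

                Guid uid = Guid.NewGuid();
                bof.WriteBinaryObject(uid, Encoding.UTF8.GetBytes("first"));    // version 0
                bof.WriteBinaryObject(uid, Encoding.UTF8.GetBytes("second"));   // version 1

                Console.WriteLine(Encoding.UTF8.GetString(bof.ReadBinaryObject(uid)));      // latest: "second"
                Console.WriteLine(Encoding.UTF8.GetString(bof.ReadBinaryObject(uid, 0)));   // "first"

                bof.Flush();
                bof.Close();
            }
        }
    }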