// /**
//  * File: FSStorage.cs
//  * Author: haraldwolff
//  *
//  * This file and its content is copyrighted by the Author and / or copyright holder.
//  * Any use without proper permission is illegal and may lead to legal actions.
//  *
//  *
// **/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ln.logging;
using ln.types.odb.ng.index;
using ln.types.odb.ng.storage.bases;
using ln.types.btree;
using ln.types.threads;
using ln.objects.catalog;
namespace ln.types.odb.ng.storage
{
    /**
     * SegmentedFileStorage
     *
     * Directory Layout:
     *
     * /
     * /data.odb        Serialized Document Data
     * /data.idx        Serialized Lookup Index for Documents and Free Areas
     *
     * data.odb
     * ----------
     * 0000     4   MAGIC Bytes
     * 0004     4   Version
     * 0008     8   LastCloseTimestamp
     * 0010     4   FirstOffset
     * 0014     4   GranularWidth
     * 0018     8   Reserved 0
     *
     **/
    public class SegmentedFileStorage : StorageBase
    {
        /// <summary>Directory this storage keeps its files in.</summary>
        public String StoragePath { get; }
        /// <summary>Full path of the segmented data file.</summary>
        public String DataFileName => System.IO.Path.Combine(StoragePath, "data.odb");

        public override bool IsCaching => false;

        /// <summary>When true (default), Save()/Delete() sync the data file immediately.</summary>
        public bool AutoFlush { get; set; } = true;

        SegmentedFile segmentedFile;

        // NOTE(review): the generic arguments below were restored from usage — the
        // original text had all angle-bracket content stripped. unusedSegments is
        // keyed by Segment.Offset (free-space lookup), usedSegments by Segment.ID
        // (document lookup). TODO confirm the Offset key type against
        // SegmentedFile.Segment's declaration.
        MappingBTree<long, SegmentedFile.Segment> unusedSegments = new MappingBTree<long, SegmentedFile.Segment>((s) => s.Offset);
        MappingBTree<Guid, SegmentedFile.Segment> usedSegments = new MappingBTree<Guid, SegmentedFile.Segment>((s) => s.ID);

        IndexPath.DocumentPath indexRoot = new IndexPath.DocumentPath();

        public SegmentedFileStorage(string storagePath)
        {
            StoragePath = storagePath;
        }

        private void AssertOpen()
        {
            if (!IsOpen)
                throw new IOException("Not open");
        }

        public override bool IsOpen => ((segmentedFile != null) && segmentedFile.IsOpen);

        /// <summary>
        /// Open the storage: scan all segments, deduplicate document IDs (keeping the
        /// newest copy of each document), and load the persisted indeces.
        /// </summary>
        /// <returns>true if the storage was opened; false if it was already open.</returns>
        /// <exception cref="IOException">Propagated from the underlying file operations.</exception>
        public override bool Open()
        {
            if (!IsOpen)
            {
                try
                {
                    if (!Directory.Exists(StoragePath))
                        Directory.CreateDirectory(StoragePath);

                    segmentedFile = new SegmentedFile(DataFileName);
                    segmentedFile.Open();

                    foreach (SegmentedFile.Segment segment in segmentedFile.Segments)
                    {
                        if (Guid.Empty.Equals(segment.ID))
                        {
                            // Guid.Empty marks a free (recyclable) segment.
                            unusedSegments.Add(segment);
                        }
                        else if (usedSegments.TryGet(segment.ID, out SegmentedFile.Segment existing))
                        {
                            // Two segments claim the same document ID (e.g. after an
                            // interrupted Save). Keep the newer one, recycle the older.
                            if (existing.TimeStamp < segment.TimeStamp)
                            {
                                // BUG FIX: the original cleared existing.ID *before*
                                // calling usedSegments.RemoveKey(existing.ID) — so it
                                // looked up Guid.Empty and left the stale entry in the
                                // tree — and it never registered the newer segment.
                                // Remove first, then recycle, then add the newer one.
                                usedSegments.RemoveKey(existing.ID);
                                existing.ID = Guid.Empty;
                                segmentedFile.Write(existing, new byte[0]);
                                unusedSegments.Add(existing);
                                usedSegments.Add(segment);
                            }
                            else
                            {
                                segment.ID = Guid.Empty;
                                segmentedFile.Write(segment, new byte[0]);
                                unusedSegments.Add(segment);
                            }
                        }
                        else
                        {
                            usedSegments.Add(segment);
                        }
                    }

                    if (File.Exists(System.IO.Path.Combine(StoragePath, "indeces.lst")))
                    {
                        bool needsRebuild = false;

                        using (FileStream indexLst = new FileStream(System.IO.Path.Combine(StoragePath, "indeces.lst"), FileMode.Open))
                        {
                            byte[] indexLstBytes = indexLst.ReadBytes((int)indexLst.Length);
                            ODBList idxList = new ODBList(indexLstBytes, 0, indexLstBytes.Length);
                            foreach (ODBEntity indexName in idxList)
                            {
                                // NOTE(review): generic argument restored; As<string>()
                                // matches IndexPath.SplitPath(string) — confirm.
                                indexRoot.Ensure(IndexPath.SplitPath(indexName.As<string>()));
                            }
                        }

                        foreach (Index index in indexRoot.GetIndeces())
                        {
                            // An index that fails to load (stale timestamp) forces a rebuild.
                            if (!index.LoadIndex(StoragePath, segmentedFile.LastCloseTimestamp))
                                needsRebuild = true;
                        }
                        if (needsRebuild)
                            RebuildIndeces();
                    }
                    return true;
                }
                catch (Exception)
                {
                    // Roll back to a consistent closed state before rethrowing.
                    segmentedFile?.Close();
                    segmentedFile = null;
                    usedSegments.Clear();
                    unusedSegments.Clear();
                    throw;
                }
            }
            return false;
        }

        /// <summary>
        /// Close the data file and persist the list and contents of all indeces.
        /// </summary>
        /// <exception cref="IOException">Thrown when the storage is not open.</exception>
        public override void Close()
        {
            // NOTE(review): the class locks on 'this' throughout; a private readonly
            // gate object would be safer, but changing the lock target could break
            // external callers that synchronize on this instance.
            lock (this)
            {
                AssertOpen();
                segmentedFile.Close();

                List<string> indexNames = new List<string>();
                foreach (Index index in indexRoot.GetIndeces())
                {
                    indexNames.Add(index.IndexName);
                    // LastCloseTimestamp is read *after* Close() so the saved indeces
                    // carry the timestamp LoadIndex() compares against on reopen.
                    index.SaveIndex(StoragePath, segmentedFile.LastCloseTimestamp);
                }

                ODBList indexList = new ODBList();
                indexList.AddRange(indexNames.Select((x) => ODBEntity.FromNative(x)));

                // FIX: 'using' guarantees the stream is released even if WriteBytes
                // throws (the original called Close()/Dispose() manually).
                using (FileStream indexLst = new FileStream(System.IO.Path.Combine(StoragePath, "indeces.lst"), FileMode.Create))
                {
                    indexLst.WriteBytes(indexList.GetStorageBytes());
                }
            }
        }

        /// <summary>Flush pending writes of the underlying segmented file.</summary>
        public void Sync()
        {
            lock (this)
            {
                segmentedFile.Sync();
            }
        }

        /// <summary>Snapshot of all document IDs currently stored.</summary>
        public override IEnumerable<Guid> GetDocumentIDs()
        {
            lock (this)
            {
                // ToArray() so the caller never enumerates the live tree unlocked.
                return usedSegments.Keys.ToArray();
            }
        }

        /// <summary>Load the document stored under <paramref name="documentID"/>.</summary>
        /// <exception cref="KeyNotFoundException">No document with that ID exists.</exception>
        public override Document Load(Guid documentID)
        {
            lock (this)
            {
                if (!usedSegments.TryGet(documentID, out SegmentedFile.Segment segment))
                    throw new KeyNotFoundException();
                return LoadDocument(segment);
            }
        }

        public override bool Contains(Guid documentID)
        {
            lock (this)
            {
                return usedSegments.ContainsKey(documentID);
            }
        }

        /// <summary>Read and deserialize one segment; logs context before rethrowing on failure.</summary>
        private Document LoadDocument(SegmentedFile.Segment segment)
        {
            byte[] storageBytes = segmentedFile.Read(segment);
            try
            {
                return new Document(storageBytes)
                {
                    StorageTimeStamp = segment.TimeStamp,
                };
            }
            catch (Exception e)
            {
                Logging.Log(LogLevel.DEBUG, "Exception while Deserializing Document from FSStorage: {1} ID={0}", segment.ID, StoragePath);
                Logging.Log(LogLevel.DEBUG, "StorageArea: {0}", segment);
                Logging.Log(e);
                throw;
            }
        }

        /// <summary>
        /// Persist a document: reuse a free segment large enough for the payload (or
        /// append a new one), update the indeces, and retire the previous segment.
        /// </summary>
        public override void Save(Document document)
        {
            lock (this)
            {
                byte[] storageBytes = document.GetStorageBytes();

                SegmentedFile.Segment segment = PopUnusedSegment(storageBytes.Length);
                if (segment == null)
                {
                    segment = segmentedFile.Append(document.ID, storageBytes);
                }
                else
                {
                    segment.ID = document.ID;
                    segmentedFile.Write(segment, storageBytes);
                }

                indexRoot.Replace(document.ID, document);

                // Retire the segment that previously held this document, if any.
                if (usedSegments.TryGet(document.ID, out SegmentedFile.Segment previousSegment))
                {
                    usedSegments.RemoveKey(document.ID);
                    previousSegment.ID = Guid.Empty;
                    segmentedFile.Write(previousSegment, new byte[0]);
                    PushUnusedSegment(previousSegment);
                }

                document.StorageTimeStamp = segment.TimeStamp;
                usedSegments.Add(segment);

                if (AutoFlush)
                    segmentedFile.Sync();
            }
        }

        /// <summary>Remove a document, recycling its segment as free space. No-op if absent.</summary>
        public override void Delete(Guid documentID)
        {
            lock (this)
            {
                if (usedSegments.TryGet(documentID, out SegmentedFile.Segment segment))
                {
                    usedSegments.RemoveKey(documentID);
                    segment.ID = Guid.Empty;
                    segmentedFile.Write(segment, new byte[0]);
                    indexRoot.Remove(documentID);
                    PushUnusedSegment(segment);

                    if (AutoFlush)
                        segmentedFile.Sync();
                }
            }
        }

        /// <summary>Re-read the stored state of <paramref name="document"/> into it.</summary>
        public override bool Refresh(Document document)
        {
            Load(document.ID).CloneTo(document);
            return true;
        }

        /// <summary>
        /// First-fit search for a free segment with at least <paramref name="payloadSize"/>
        /// payload bytes; removes and returns it, or null if none fits.
        /// </summary>
        private SegmentedFile.Segment PopUnusedSegment(int payloadSize)
        {
            foreach (SegmentedFile.Segment segment in unusedSegments)
            {
                if (segment.PayloadSize >= payloadSize)
                {
                    // Safe despite removing mid-enumeration: we return immediately
                    // and never resume the iterator.
                    unusedSegments.Remove(segment);
                    return segment;
                }
            }
            return null;
        }

        private void PushUnusedSegment(SegmentedFile.Segment segment)
        {
            unusedSegments.Add(segment);
        }

        /// <summary>Storage timestamp of a document, or default(DateTime) if absent.</summary>
        public override DateTime GetStorageTimestamp(Guid documentID)
        {
            // FIX: take the lock like every other accessor, and use TryGet instead of
            // ContainsKey + indexer (single lookup, no check-then-read race).
            lock (this)
            {
                if (usedSegments.TryGet(documentID, out SegmentedFile.Segment segment))
                    return segment.TimeStamp;
                return default(DateTime);
            }
        }

        /// <summary>
        /// IDs of all documents whose value at <paramref name="path"/> satisfies
        /// <paramref name="predicate"/>; answered from the index when one exists,
        /// otherwise by a full scan.
        /// </summary>
        public override IEnumerable<Guid> GetDocumentIDs(string path, Predicate<ODBEntity> predicate)
        {
            lock (this)
            {
                index.Path p = index.IndexPath.SplitPath(path);
                if (indexRoot.Indexed(p))
                {
                    // Fast path: the index answers directly.
                    return indexRoot.GetDocumentIDs(p, predicate);
                }
                else
                {
                    // Slow path: load every document and apply the predicate.
                    HashSet<Guid> documentIDs = new HashSet<Guid>();
                    IEnumerable<Guid> ids = GetDocumentIDs();
                    foreach (Guid documentID in ids)
                    {
                        Document document = Load(documentID);
                        if (predicate(document[path]))
                            documentIDs.Add(documentID);
                    }
                    return documentIDs;
                }
            }
        }

        /// <summary>Ensure indeces exist for all given paths, rebuilding if any were newly created.</summary>
        public override void EnsureIndex(params string[] paths)
        {
            lock (this)
            {
                bool needsRebuild = false;
                foreach (String path in paths)
                {
                    if (indexRoot.Ensure(IndexPath.SplitPath(path)))
                        needsRebuild = true;
                }
                if (needsRebuild)
                    RebuildIndeces();
            }
        }

        /// <summary>Re-feed every stored document into the index tree.</summary>
        public void RebuildIndeces()
        {
            Logging.Log(LogLevel.INFO, "FSStorage: RebuildIndeces()");
            foreach (Guid documentID in GetDocumentIDs())
            {
                Document document = Load(documentID);
                indexRoot.Replace(documentID, document);
            }
        }
    }
}