diff --git a/ConsoleApp1/ConsoleApp1.csproj b/ConsoleApp1/ConsoleApp1.csproj
new file mode 100644
index 000000000..3469f1a6c
--- /dev/null
+++ b/ConsoleApp1/ConsoleApp1.csproj
@@ -0,0 +1,14 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFramework>net8.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\LiteDB\LiteDB.csproj" />
+  </ItemGroup>
+
+</Project>
diff --git a/ConsoleApp1/Program.cs b/ConsoleApp1/Program.cs
new file mode 100644
index 000000000..36366b320
--- /dev/null
+++ b/ConsoleApp1/Program.cs
@@ -0,0 +1,39 @@
+using LiteDB;
+using LiteDB.Engine;
+
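+// ad-hoc repro app: copy an encrypted datafile and run a full rebuild over the copy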
+var password = "bzj2NplCbVH/bB8fxtjEC7u0unYdKHJVSmdmPgArRBwmmGw0+Wd2tE+b2zRMFcHAzoG71YIn/2Nq1EMqa5JKcQ==";
+var original = "C:\\LiteDB\\Examples\\Original.db";
+var path = $"C:\\LiteDB\\Examples\\TestCacheDb_{DateTime.Now.Ticks}.db";
+
+File.Copy(original, path);
+
+var settings = new EngineSettings
+{
+ //AutoRebuild = true,
+ Filename = path,
+ Password = password
+};
+
+/*
+var errors = new List<FileReaderError>();
+
+using var reader = new FileReaderV8(settings, errors);
+
+reader.Open();
+
+var pragmas = reader.GetPragmas();
+var cols = reader.GetCollections().ToArray();
+
+var docs = reader.GetDocuments("hubData$AppOperations").ToArray();
+*/
+
+var db = new LiteEngine(settings);
+
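+// rebuild the copied datafile: all data is re-imported into a new file and the original is kept with a -backup extension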
+db.Rebuild();
+
+
+
+//var reader = db.Query("hubData$AppOperations", Query.All());
+//var data = reader.ToList();
+
+Console.ReadKey();
\ No newline at end of file
diff --git a/LiteDB.sln b/LiteDB.sln
index 848dbb661..3f75c194f 100644
--- a/LiteDB.sln
+++ b/LiteDB.sln
@@ -13,6 +13,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LiteDB.Benchmarks", "LiteDB
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LiteDB.Stress", "LiteDB.Stress\LiteDB.Stress.csproj", "{FFBC5669-DA32-4907-8793-7B414279DA3B}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConsoleApp1", "ConsoleApp1\ConsoleApp1.csproj", "{E8763934-E46A-4AAF-A2B5-E812016DAF84}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -39,6 +41,10 @@ Global
{FFBC5669-DA32-4907-8793-7B414279DA3B}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FFBC5669-DA32-4907-8793-7B414279DA3B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {E8763934-E46A-4AAF-A2B5-E812016DAF84}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E8763934-E46A-4AAF-A2B5-E812016DAF84}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E8763934-E46A-4AAF-A2B5-E812016DAF84}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E8763934-E46A-4AAF-A2B5-E812016DAF84}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/LiteDB/Client/Shared/SharedEngine.cs b/LiteDB/Client/Shared/SharedEngine.cs
index 95664d461..2325ae666 100644
--- a/LiteDB/Client/Shared/SharedEngine.cs
+++ b/LiteDB/Client/Shared/SharedEngine.cs
@@ -16,20 +16,14 @@ public class SharedEngine : ILiteEngine
{
private readonly EngineSettings _settings;
private readonly Mutex _mutex;
- private readonly LiteEngine _engine;
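+ // engine is now created in OpenDatabase() and released in CloseDatabase()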
+ private LiteEngine _engine;
private int _stack = 0;
- public bool IsOpen => _engine.IsOpen;
public SharedEngine(EngineSettings settings)
{
_settings = settings;
- // kepp control over open/close engine
- _settings.AutoOpen = false;
-
- _engine = new LiteEngine(_settings);
-
var name = Path.GetFullPath(settings.Filename).ToLower().Sha1();
try
@@ -71,7 +65,7 @@ private void OpenDatabase()
try
{
- _engine.Open();
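+ // the LiteEngine constructor now opens the datafile itself (public Open/Close were removed)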
+ _engine = new LiteEngine(_settings);
}
catch
{
@@ -95,23 +89,13 @@ private void CloseDatabase()
if (_stack == 0)
{
_engine.Close();
+ _engine = null;
_mutex.ReleaseMutex();
}
}
}
- public bool Open()
- {
- return true; // controlled by OpenDatabase() - no external need
- }
-
- public bool Close()
- {
- return true; // controlled by CloseDatabase() - no external need
- }
-
-
#region Transaction Operations
public bool BeginTrans()
diff --git a/LiteDB/Document/Bson/BsonSerializer.cs b/LiteDB/Document/Bson/BsonSerializer.cs
index 6726357f9..9942086a5 100644
--- a/LiteDB/Document/Bson/BsonSerializer.cs
+++ b/LiteDB/Document/Bson/BsonSerializer.cs
@@ -38,7 +38,7 @@ public static BsonDocument Deserialize(byte[] buffer, bool utcDate = false, Hash
using (var reader = new BufferReader(buffer, utcDate))
{
- return reader.ReadDocument(fields);
+ return reader.ReadDocument(fields).GetValue();
}
}
}
diff --git a/LiteDB/Engine/Disk/Serializer/BufferReader.cs b/LiteDB/Engine/Disk/Serializer/BufferReader.cs
index 255ca4cb8..317a268a4 100644
--- a/LiteDB/Engine/Disk/Serializer/BufferReader.cs
+++ b/LiteDB/Engine/Disk/Serializer/BufferReader.cs
@@ -397,8 +397,8 @@ public BsonValue ReadIndexKey()
// Use +1 byte only for length
case BsonType.String: return this.ReadString(this.ReadByte());
- case BsonType.Document: return this.ReadDocument(null);
- case BsonType.Array: return this.ReadArray();
+ case BsonType.Document: return this.ReadDocument(null).GetValue();
+ case BsonType.Array: return this.ReadArray().GetValue();
// Use +1 byte only for length
case BsonType.Binary: return this.ReadBytes(this.ReadByte());
@@ -422,51 +422,66 @@ public BsonValue ReadIndexKey()
/// <summary>
/// Read a BsonDocument from reader
/// </summary>
- public BsonDocument ReadDocument(HashSet<string> fields = null)
+ public Result<BsonDocument> ReadDocument(HashSet<string> fields = null)
{
- var length = this.ReadInt32();
- var end = _position + length - 5;
- var remaining = fields == null || fields.Count == 0 ? null : new HashSet<string>(fields, StringComparer.OrdinalIgnoreCase);
-
var doc = new BsonDocument();
- while (_position < end && (remaining == null || remaining?.Count > 0))
+ try
{
- var value = this.ReadElement(remaining, out string name);
+ var length = this.ReadInt32();
+ var end = _position + length - 5;
+ var remaining = fields == null || fields.Count == 0 ? null : new HashSet<string>(fields, StringComparer.OrdinalIgnoreCase);
- // null value means are not selected field
- if (value != null)
+ while (_position < end && (remaining == null || remaining?.Count > 0))
{
- doc[name] = value;
+ var value = this.ReadElement(remaining, out string name);
- // remove from remaining fields
- remaining?.Remove(name);
+ // null value means the field was not selected
+ if (value != null)
+ {
+ doc[name] = value;
+
+ // remove from remaining fields
+ remaining?.Remove(name);
+ }
}
- }
- this.MoveForward(1); // skip \0
+ this.MoveForward(1); // skip \0 (note: this may trigger a disk read!)
- return doc;
+ return doc;
+ }
+ catch (Exception ex)
+ {
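+ // return the partially-read document together with the error; the caller decides whether to throw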
+ return new Result<BsonDocument>(doc, ex);
+ }
}
/// <summary>
/// Read an BsonArray from reader
/// </summary>
- public BsonArray ReadArray()
+ public Result<BsonArray> ReadArray()
{
- var length = this.ReadInt32();
- var end = _position + length - 5;
var arr = new BsonArray();
- while (_position < end)
+ try
{
- var value = this.ReadElement(null, out string name);
- arr.Add(value);
- }
+ var length = this.ReadInt32();
+ var end = _position + length - 5;
+
+ while (_position < end)
+ {
+ var value = this.ReadElement(null, out string name);
+ arr.Add(value);
+ }
- this.MoveForward(1); // skip \0
+ this.MoveForward(1); // skip \0
- return arr;
+ return arr;
+ }
+ catch (Exception ex)
+ {
+ return new Result<BsonArray>(arr, ex);
+ }
}
/// <summary>
@@ -513,11 +528,11 @@ private BsonValue ReadElement(HashSet<string> remaining, out string name)
}
else if (type == 0x03) // Document
{
- return this.ReadDocument();
+ return this.ReadDocument().GetValue();
}
else if (type == 0x04) // Array
{
- return this.ReadArray();
+ return this.ReadArray().GetValue();
}
else if (type == 0x05) // Binary
{
diff --git a/LiteDB/Engine/Engine/Index.cs b/LiteDB/Engine/Engine/Index.cs
index edf5e65ea..4cd644fce 100644
--- a/LiteDB/Engine/Engine/Index.cs
+++ b/LiteDB/Engine/Engine/Index.cs
@@ -55,7 +55,7 @@ public bool EnsureIndex(string collection, string name, BsonExpression expressio
{
using (var reader = new BufferReader(data.Read(pkNode.DataBlock)))
{
- var doc = reader.ReadDocument(expression.Fields);
+ var doc = reader.ReadDocument(expression.Fields).GetValue();
// first/last node in this document that will be added
IndexNode last = null;
diff --git a/LiteDB/Engine/Engine/Rebuild.cs b/LiteDB/Engine/Engine/Rebuild.cs
index dd52c350e..6011163b0 100644
--- a/LiteDB/Engine/Engine/Rebuild.cs
+++ b/LiteDB/Engine/Engine/Rebuild.cs
@@ -10,11 +10,11 @@ namespace LiteDB.Engine
public partial class LiteEngine
{
/// <summary>
- /// Implement a full database export/import. Database should be closed before Rebuild
+ /// Implement a full database rebuild. A backup copy is created with a -backup extension; all data is read and re-created in a new datafile
/// </summary>
public long Rebuild(RebuildOptions options)
{
- if (_isOpen) throw LiteException.InvalidEngineState(false, "REBUILD");
+ this.Close();
// run build service
var rebuilder = new RebuildService(_settings);
@@ -23,7 +23,23 @@ public long Rebuild(RebuildOptions options)
options.Errors.Clear();
// return how many bytes of diference from original/rebuild version
- return rebuilder.Rebuild(options);
+ var diff = rebuilder.Rebuild(options);
+
+ // re-open database
+ this.Open();
+
+ return diff;
+ }
+
+ /// <summary>
+ /// Implement a full database rebuild. A backup copy is created with a -backup extension; all data is read and re-created in a new datafile
+ /// </summary>
+ public long Rebuild()
+ {
+ var collation = new Collation(this.Pragma(Pragmas.COLLATION));
+ var password = _settings.Password;
+
+ return this.Rebuild(new RebuildOptions { Password = password, Collation = collation });
}
/// <summary>
diff --git a/LiteDB/Engine/Engine/Transaction.cs b/LiteDB/Engine/Engine/Transaction.cs
index 8199a179a..3cb4302c2 100644
--- a/LiteDB/Engine/Engine/Transaction.cs
+++ b/LiteDB/Engine/Engine/Transaction.cs
@@ -14,7 +14,7 @@ public partial class LiteEngine
/// </summary>
public bool BeginTrans()
{
- if (!_isOpen) throw LiteException.InvalidEngineState(true, "TRANSACTION");
+ if (_disposed) throw LiteException.InvalidEngineState(true, "TRANSACTION");
var transacion = _monitor.GetTransaction(true, false, out var isNew);
@@ -32,7 +32,7 @@ public bool BeginTrans()
/// </summary>
public bool Commit()
{
- if (!_isOpen) throw LiteException.InvalidEngineState(true, "TRANSACTION");
+ if (_disposed) throw LiteException.InvalidEngineState(true, "TRANSACTION");
var transaction = _monitor.GetTransaction(false, false, out _);
@@ -57,7 +57,7 @@ public bool Commit()
/// </summary>
public bool Rollback()
{
- if (!_isOpen) throw LiteException.InvalidEngineState(true, "TRANSACTION");
+ if (_disposed) throw LiteException.InvalidEngineState(true, "TRANSACTION");
var transaction = _monitor.GetTransaction(false, false, out _);
@@ -78,7 +78,7 @@ public bool Rollback()
/// </summary>
private T AutoTransaction<T>(Func<TransactionService, T> fn)
{
- if (!_isOpen) throw LiteException.InvalidEngineState(true, "TRANSACTION");
+ if (_disposed) throw LiteException.InvalidEngineState(true, "TRANSACTION");
var transaction = _monitor.GetTransaction(true, false, out var isNew);
diff --git a/LiteDB/Engine/EngineSettings.cs b/LiteDB/Engine/EngineSettings.cs
index ae7352fc8..67ee54b93 100644
--- a/LiteDB/Engine/EngineSettings.cs
+++ b/LiteDB/Engine/EngineSettings.cs
@@ -56,11 +56,6 @@ public class EngineSettings
/// </summary>
public bool ReadOnly { get; set; } = false;
- /// <summary>
- /// Call Open() method in LiteEngine in class constructor
- /// </summary>
- public bool AutoOpen { get; set; } = true;
-
/// <summary>
/// After a Close with exception do a database rebuild
///
diff --git a/LiteDB/Engine/FileReader/FileReaderError.cs b/LiteDB/Engine/FileReader/FileReaderError.cs
index 8d10c2711..e777ffd00 100644
--- a/LiteDB/Engine/FileReader/FileReaderError.cs
+++ b/LiteDB/Engine/FileReader/FileReaderError.cs
@@ -8,14 +8,13 @@ namespace LiteDB.Engine
{
/// <summary>
/// </summary>
- public class FileReaderError
+ internal class FileReaderError
{
public DateTime Created { get; } = DateTime.Now;
public FileOrigin Origin { get; set; }
public long Position { get; set; }
public uint? PageID { get; set; }
- public int Code { get; set; }
- public string Field { get; set; }
+ public PageType PageType { get; set; }
public string Message { get; set; }
public Exception Exception { get; set; }
}
diff --git a/LiteDB/Engine/FileReader/FileReaderV8.cs b/LiteDB/Engine/FileReader/FileReaderV8.cs
index fc4c48886..9a7d6616f 100644
--- a/LiteDB/Engine/FileReader/FileReaderV8.cs
+++ b/LiteDB/Engine/FileReader/FileReaderV8.cs
@@ -3,6 +3,7 @@
using System.Globalization;
using System.IO;
using System.Linq;
+using System.Reflection;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
@@ -11,10 +12,18 @@
namespace LiteDB.Engine
{
/// <summary>
- /// Internal class to read all datafile documents - use only Stream - no cache system (database are modified during this read - shrink)
+ /// Internal class to read all datafile documents - uses only Stream, no cache system. Also reads the log file (committed transactions only)
/// </summary>
internal class FileReaderV8 : IFileReader
{
+ private struct PageInfo
+ {
+ public uint PageID;
+ public FileOrigin Origin;
+ public PageType PageType;
+ public long Position;
+ }
+
private readonly Dictionary<string, uint> _collections = new Dictionary<string, uint>();
private readonly Dictionary<string, List<IndexInfo>> _indexes = new Dictionary<string, List<IndexInfo>>();
private readonly Dictionary<string, BsonValue> _pragmas = new Dictionary<string, BsonValue>();
@@ -23,42 +32,202 @@ internal class FileReaderV8 : IFileReader
private Stream _dataStream;
private Stream _logStream;
+ private readonly IDictionary<uint, long> _logIndexMap = new Dictionary<uint, long>();
+ private uint _maxPageID; // a file-length based max pageID to be tested
- private bool _disposedValue;
+ private bool _disposed;
private readonly EngineSettings _settings;
private readonly IList<FileReaderError> _errors;

- public IDictionary<string, BsonValue> GetPragmas() => _pragmas;
-
public FileReaderV8(EngineSettings settings, IList<FileReaderError> errors)
{
_settings = settings;
_errors = errors;
}
+ /// <summary>
+ /// Open data file and log file, read header and collection pages
+ /// </summary>
public void Open()
{
- var dataFactory = _settings.CreateDataFactory();
- var logFactory = _settings.CreateLogFactory();
+ try
+ {
+ var dataFactory = _settings.CreateDataFactory();
+ var logFactory = _settings.CreateLogFactory();
- _dataStream = dataFactory.GetStream(true, true, false);
+ // get maxPageID based on both file lengths
+ _maxPageID = (uint)((dataFactory.GetLength() + logFactory.GetLength()) / PAGE_SIZE);
- _dataStream.Position = 0;
+ _dataStream = dataFactory.GetStream(true, true, false);
- if (logFactory.Exists())
- {
- _logStream = logFactory.GetStream(false, false, true);
+ _dataStream.Position = 0;
+
+ if (logFactory.Exists())
+ {
+ _logStream = logFactory.GetStream(false, false, true);
- this.LoadIndexMap();
+ this.LoadIndexMap();
+ }
+
+ this.LoadPragmas();
+
+ this.LoadDataPages();
+
+ this.LoadCollections();
+
+ this.LoadIndexes();
+ }
+ catch (Exception ex)
+ {
+ this.HandleError(ex, new PageInfo());
}
+ }
+
+ /// <summary>
+ /// Read all pragma values
+ /// </summary>
+ public IDictionary<string, BsonValue> GetPragmas() => _pragmas;
+
+ /// <summary>
+ /// Read all collections based on the header page
+ /// </summary>
+ public IEnumerable<string> GetCollections() => _collections.Keys;
+
+ /// <summary>
+ /// Read all indexes from all collection pages (except _id index)
+ /// </summary>
+ public IEnumerable<IndexInfo> GetIndexes(string collection) => _indexes[collection];
+
+ /// <summary>
+ /// Read all documents from current collection with NO index use - read direct from free lists
+ /// There is no document order
+ /// </summary>
+ public IEnumerable<BsonDocument> GetDocuments(string collection)
+ {
+ var colID = _collections[collection];
+ var dataPages = _collectionsDataPages[colID];
+ var uniqueIDs = new HashSet<BsonValue>();
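+ // track _id values to detect duplicated documents across data pages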
+
+ foreach(var dataPage in dataPages)
+ {
+ var page = this.ReadPage(dataPage, out var pageInfo);
+
+ if (page.Fail)
+ {
+ this.HandleError(page.Exception, pageInfo);
+ continue;
+ }
+
+ var buffer = page.Value.Buffer;
+ var itemsCount = page.Value.ItemsCount;
+ var highestIndex = page.Value.HighestIndex;
+
+ // no items
+ if (itemsCount == 0 || highestIndex == byte.MaxValue) continue;
+
+ for (byte i = 0; i <= highestIndex; i++)
+ {
+ BsonDocument doc;
+
+ // one try/catch per document (each non-extend data block starts a new document)
+ try
+ {
+ // resolve slot address
+ var positionAddr = BasePage.CalcPositionAddr(i);
+ var lengthAddr = BasePage.CalcLengthAddr(i);
+
+ // read segment position/length
+ var position = buffer.ReadUInt16(positionAddr);
+ var length = buffer.ReadUInt16(lengthAddr);
+
+ // empty slot
+ if (length == 0) continue;
+
+ // get segment slice
+ var segment = buffer.Slice(position, length);
+ var extend = segment.ReadBool(DataBlock.P_EXTEND);
+ var nextBlock = segment.ReadPageAddress(DataBlock.P_NEXT_BLOCK);
+ var data = segment.Slice(DataBlock.P_BUFFER, segment.Count - DataBlock.P_BUFFER);
+
+ if (extend) continue; // ignore extend block (start only in first data block)
+
+ // merge all data block content into a single memory stream and read bson document
+ using (var mem = new MemoryStream())
+ {
+ // write first block
+ mem.Write(data.Array, data.Offset, data.Count);
- this.LoadPragmas();
+ while (nextBlock.IsEmpty == false)
+ {
+ // read next page block
+ var nextPage = this.ReadPage(nextBlock.PageID, out pageInfo);
- this.LoadCollections();
+ if (nextPage.Fail) throw nextPage.Exception;
+ var nextBuffer = nextPage.Value.Buffer;
+ // make page validations
+ ENSURE(nextPage.Value.PageType == PageType.Data, $"Invalid PageType (expected Data, got {nextPage.Value.PageType})");
+ ENSURE(nextPage.Value.ColID == colID, $"Invalid ColID in this page (expected {colID}, got {nextPage.Value.ColID})");
+ ENSURE(nextPage.Value.ItemsCount > 0, "Page with no items");
+ // read slot address
+ positionAddr = BasePage.CalcPositionAddr(i);
+ lengthAddr = BasePage.CalcLengthAddr(i);
+
+ // read segment position/length
+ position = nextBuffer.ReadUInt16(positionAddr);
+ length = nextBuffer.ReadUInt16(lengthAddr);
+
+ // empty slot
+ ENSURE(length > 0, $"Last DataBlock requested a next extend at {nextBlock}, but that block's footer slot is empty");
+
+ // get segment slice
+ segment = nextBuffer.Slice(position, length);
+ extend = segment.ReadBool(DataBlock.P_EXTEND);
+ nextBlock = segment.ReadPageAddress(DataBlock.P_NEXT_BLOCK);
+ data = segment.Slice(DataBlock.P_BUFFER, segment.Count - DataBlock.P_BUFFER);
+
+ ENSURE(extend == true, $"Next datablock must always be an extend. Invalid data block {nextBlock}");
+
+ // write data on memorystream
+
+ mem.Write(data.Array, data.Offset, data.Count);
+ }
+
+ // read the merged byte array as a single BSON document
+ using (var r = new BufferReader(mem.ToArray(), false))
+ {
+ var docResult = r.ReadDocument();
+ var id = docResult.Value["_id"];
+
+ ENSURE(!(id == BsonValue.Null || id == BsonValue.MinValue || id == BsonValue.MaxValue), $"Invalid _id value: {id}");
+ ENSURE(uniqueIDs.Contains(id) == false, $"Duplicated _id value: {id}");
+
+ uniqueIDs.Add(id);
+
+ if (docResult.Fail)
+ {
+ this.HandleError(docResult.Exception, pageInfo);
+ }
+
+ doc = docResult.Value;
+ }
+ }
+ }
+ // on error: log it and skip this document
+ catch (Exception ex)
+ {
+ this.HandleError(ex, pageInfo);
+ doc = null;
+ }
+
+ if (doc != null)
+ {
+ yield return doc;
+ }
+ }
+ }
}
/// <summary>
@@ -66,14 +235,13 @@ public void Open()
/// </summary>
private void LoadPragmas()
{
- var origin = FileOrigin.None;
- var position = 0L;
+ if (_disposed) return;
- try
- {
- var header = this.ReadPage(0, out origin, out position);
+ var result = this.ReadPage(0, out var pageInfo);
- var buffer = header.Buffer;
+ if (result.Ok)
+ {
+ var buffer = result.Value.Buffer;
_pragmas[Pragmas.USER_VERSION] = buffer.ReadInt32(EnginePragmas.P_USER_VERSION);
_pragmas[Pragmas.CHECKPOINT] = buffer.ReadInt32(EnginePragmas.P_CHECKPOINT);
@@ -81,17 +249,9 @@ private void LoadPragmas()
_pragmas[Pragmas.UTC_DATE] = buffer.ReadBool(EnginePragmas.P_UTC_DATE);
_pragmas[Pragmas.LIMIT_SIZE] = buffer.ReadInt64(EnginePragmas.P_LIMIT_SIZE);
}
- catch (Exception ex)
+ else
{
- _errors.Add(new FileReaderError
- {
- Origin = origin,
- Position = position,
- PageID = 0,
- Code = 1,
- Message = $"Header pragmas could not be loaded",
- Exception = ex
- });
+ this.HandleError(result.Exception, pageInfo);
}
}
@@ -100,25 +260,37 @@ private void LoadPragmas()
/// </summary>
private void LoadDataPages()
{
- var header = ReadPage(0, out _, out _);
+ if (_disposed) return;
+ var header = this.ReadPage(0, out var pageInfo).GetValue();
var lastPageID = header.Buffer.ReadUInt32(HeaderPage.P_LAST_PAGE_ID);
+ ENSURE(lastPageID <= _maxPageID, $"LastPageID {lastPageID} should be less than or equal to maxPageID {_maxPageID}");
+
for (uint i = 0; i < lastPageID; i++)
{
- var page = ReadPage(i, out _, out _);
+ var result = this.ReadPage(i, out pageInfo);
- if (page.PageType == PageType.Data)
+ if (result.Ok)
{
- if (_collectionsDataPages.TryGetValue(page.ColID, out var list))
- {
- list.Add(page.PageID);
- }
- else
+ var page = result.Value;
+
+ if (page.PageType == PageType.Data)
{
- _collectionsDataPages[page.ColID] = new List<uint> { page.PageID };
+ if (_collectionsDataPages.TryGetValue(page.ColID, out var list))
+ {
+ list.Add(page.PageID);
+ }
+ else
+ {
+ _collectionsDataPages[page.ColID] = new List<uint> { page.PageID };
+ }
}
}
+ else
+ {
+ this.HandleError(result.Exception, pageInfo);
+ }
}
}
@@ -127,37 +299,48 @@ private void LoadDataPages()
/// </summary>
private void LoadCollections()
{
- try
+ if (_disposed) return;
+
+ var header = this.ReadPage(0, out var pageInfo).GetValue();
+
+ var area = header.Buffer.Slice(HeaderPage.P_COLLECTIONS, HeaderPage.COLLECTIONS_SIZE);
+
+ using (var r = new BufferReader(new[] { area }, false))
{
- var header = this.ReadPage(0, out _, out _);
+ var result = r.ReadDocument();
- var area = header.Buffer.Slice(HeaderPage.P_COLLECTIONS, HeaderPage.COLLECTIONS_SIZE);
+ // the collections document may be only partially read (failures are handled below)
+ var collections = result.Value;
- using (var r = new BufferReader(new[] { area }, false))
+ foreach (var key in collections.Keys)
{
- var collections = r.ReadDocument();
+ // collections.key = collection name
+ // collections.value = collection PageID
+ var colID = collections[key];
- foreach (var key in collections.Keys)
+ if (colID.IsNumber == false)
+ {
+ this.HandleError($"ColID expect a number but get {colID}", pageInfo);
+ }
+ else
{
- // collections.key = collection name
- // collections.value = collection PageID
_collections[key] = (uint)collections[key].AsInt32;
}
}
- // for each collection loaded by datapages, check if exists in _collections
- foreach(var collection in _collectionsDataPages)
+ if (result.Fail)
{
- if (!_collections.ContainsValue(collection.Key))
- {
- _collections["col_" + collection.Key] = collection.Key;
- }
+ this.HandleError(result.Exception, pageInfo);
}
-
}
- catch (Exception ex)
- {
+ // for each collection loaded by datapages, check if exists in _collections
+ foreach(var collection in _collectionsDataPages)
+ {
+ if (!_collections.ContainsValue(collection.Key))
+ {
+ _collections["col_" + collection.Key] = collection.Key;
+ }
}
}
@@ -167,57 +350,75 @@ private void LoadCollections()
/// </summary>
private void LoadIndexes()
{
- foreach(var collection in _collections)
+ if (_disposed) return;
+
+ foreach (var collection in _collections)
{
- var page = ReadPage(collection.Value, out _, out _);
+ var result = this.ReadPage(collection.Value, out var pageInfo);
+
+ if (result.Fail)
+ {
+ this.HandleError(result.Exception, pageInfo);
+ continue;
+ }
+
+ var page = result.Value;
var buffer = page.Buffer;
var count = buffer.ReadByte(CollectionPage.P_INDEXES); // 1 byte
var position = CollectionPage.P_INDEXES + 1;
- for (var i = 0; i < count; i++)
+ // handle error per collection
+ try
{
- position += 2; // skip: slot (1 byte) + indexType (1 byte)
-
- var name = buffer.ReadCString(position, out var nameLength);
- // depois de ler, validar se a position ainda é válida (se é < 8192)
- // validar o tamanho do nome do índice para ver se o nome lido é válido
+ for (var i = 0; i < count; i++)
+ {
+ position += 2; // skip: slot (1 byte) + indexType (1 byte)
- position += nameLength;
+ var name = buffer.ReadCString(position, out var nameLength);
+ // after reading, validate that the position is still valid (i.e. < 8192)
+ // validate the index name length to check the name that was read is valid
- var expr = buffer.ReadCString(position, out var exprLength);
- // depois de ler, validar se a position ainda é válida (se é < 8192)
- // validar se a expr é válida
+ position += nameLength;
- position += exprLength;
+ var expr = buffer.ReadCString(position, out var exprLength);
+ // after reading, validate that the position is still valid (i.e. < 8192)
+ // validate that the expression is valid
- var unique = buffer.ReadBool(position);
- // depois de ler, validar se a position ainda é válida (se é < 8192)
+ position += exprLength;
- position += 15; // head 5 bytes, tail 5 bytes, maxLevel 1 byte, freeIndexPageList 4 bytes
+ var unique = buffer.ReadBool(position);
+ // after reading, validate that the position is still valid (i.e. < 8192)
- var indexInfo = new IndexInfo
- {
- Collection = collection.Key,
- Name = name,
- Expression = expr,
- Unique = unique
- };
+ position += 15; // head 5 bytes, tail 5 bytes, maxLevel 1 byte, freeIndexPageList 4 bytes
- // ignore _id index
- if (name == "_id") continue;
+ var indexInfo = new IndexInfo
+ {
+ Collection = collection.Key,
+ Name = name,
+ Expression = expr,
+ Unique = unique
+ };
- if (_indexes.TryGetValue(collection.Key, out var indexInfos))
- {
- indexInfos.Add(indexInfo);
- }
- else
- {
- _indexes[collection.Key] = new List { indexInfo };
+ // ignore _id index
+ if (name == "_id") continue;
+
+ if (_indexes.TryGetValue(collection.Key, out var indexInfos))
+ {
+ indexInfos.Add(indexInfo);
+ }
+ else
+ {
+ _indexes[collection.Key] = new List { indexInfo };
+ }
}
}
+ catch (Exception ex)
+ {
+ this.HandleError(ex, pageInfo);
+ continue;
+ }
}
-
}
/// <summary>
@@ -235,30 +436,6 @@ public static bool IsVersion(byte[] buffer)
buffer[0] == 1;
}
- /// <summary>
- /// Read all collection based on header page
- /// </summary>
- public IEnumerable<string> GetCollections() => _collections.Keys;
-
- /// <summary>
- /// Read all indexes from all collection pages (except _id index)
- /// </summary>
- public IEnumerable<IndexInfo> GetIndexes(string collection) => _indexes[collection];
-
- /// <summary>
- /// Read all documents from current collection with NO index use - read direct from free lists
- /// There is no document order
- /// </summary>
- public IEnumerable<BsonDocument> GetDocuments(string collection)
- {
- var colPageID = _collections[collection];
- var dataPages = _collectionsDataPages[colPageID];
-
- // varrer tudo a partir dos dataPages
-
- return null;
- }
-
/// <summary>
/// Load log file to build index map (wal map index)
/// </summary>
@@ -268,6 +445,7 @@ private void LoadIndexMap()
var transactions = new Dictionary<uint, List<PagePosition>>();
var confirmedTransactions = new List<uint>();
var currentPosition = 0L;
+ var pageInfo = new PageInfo { Origin = FileOrigin.Log };
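+ // every page read in this loop comes from the log file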
_logStream.Position = 0;
@@ -281,44 +459,45 @@ private void LoadIndexMap()
continue;
}
- _logStream.Position = currentPosition;
+ _logStream.Position = pageInfo.Position = currentPosition;
- var read = _logStream.Read(buffer.Array, buffer.Offset, PAGE_SIZE);
+ try
+ {
+ var read = _logStream.Read(buffer.Array, buffer.Offset, PAGE_SIZE);
- var pageID = buffer.ReadUInt32(BasePage.P_PAGE_ID);
- var isConfirmed = buffer.ReadBool(BasePage.P_IS_CONFIRMED);
- var transactionID = buffer.ReadUInt32(BasePage.P_TRANSACTION_ID);
+ var pageID = buffer.ReadUInt32(BasePage.P_PAGE_ID);
+ var isConfirmed = buffer.ReadBool(BasePage.P_IS_CONFIRMED);
+ var transactionID = buffer.ReadUInt32(BasePage.P_TRANSACTION_ID);
- if (read != PAGE_SIZE)
- {
- _errors.Add(new FileReaderError
- {
- Origin = FileOrigin.Log,
- Position = _logStream.Position,
- PageID = pageID,
- Code = 1,
- Message = $"Page position {_logStream} read only than {read} bytes (instead {PAGE_SIZE})"
- });
- }
+ pageInfo.PageID = pageID;
- var position = new PagePosition(pageID, currentPosition);
+ ENSURE(read == PAGE_SIZE, $"Page position {pageInfo.Position} read only {read} bytes (instead of {PAGE_SIZE})");
- if (transactions.TryGetValue(transactionID, out var list))
- {
- list.Add(position);
+ var position = new PagePosition(pageID, currentPosition);
+
+ if (transactions.TryGetValue(transactionID, out var list))
+ {
+ list.Add(position);
+ }
+ else
+ {
+ transactions[transactionID] = new List<PagePosition> { position };
+ }
+
+ // when page confirm transaction, add to confirmed transaction list
+ if (isConfirmed)
+ {
+ confirmedTransactions.Add(transactionID);
+ }
}
- else
+ catch (Exception ex)
{
- transactions[transactionID] = new List<PagePosition> { position };
+ this.HandleError(ex, pageInfo);
}
-
- // when page confirm transaction, add to confirmed transaction list
- if (isConfirmed)
+ finally
{
- confirmedTransactions.Add(transactionID);
+ currentPosition += PAGE_SIZE;
}
-
- currentPosition += PAGE_SIZE;
}
// now, log index map using only confirmed transactions (override with last transactionID)
@@ -335,41 +514,86 @@ private void LoadIndexMap()
}
/// <summary>
- /// Read page from stream
+ /// Read page from data/log stream (the log index map decides the file/position to read). Captures any exception here, but does not call HandleError
/// </summary>
- private BasePage ReadPage(uint pageID, out FileOrigin origin, out long position)
+ private Result<BasePage> ReadPage(uint pageID, out PageInfo pageInfo)
{
- var pageBuffer = new PageBuffer(new byte[PAGE_SIZE], 0, PAGE_SIZE);
+ pageInfo = new PageInfo { PageID = pageID };
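+ // pageInfo is passed back to the caller so errors can be reported with page context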
- // get data from log file or original file
- if (_logIndexMap.TryGetValue(pageID, out position))
+ try
{
- origin = FileOrigin.Log;
+ ENSURE(pageID <= _maxPageID, $"PageID: {pageID} should be less than or equal to maxPageID: {_maxPageID}");
+
+ var pageBuffer = new PageBuffer(new byte[PAGE_SIZE], 0, PAGE_SIZE);
+ Stream stream;
+ int read;
- _logStream.Position = position;
- _logStream.Read(pageBuffer.Array, pageBuffer.Offset, pageBuffer.Count);
+ // get data from log file or original file
+ if (_logIndexMap.TryGetValue(pageID, out pageInfo.Position))
+ {
+ pageInfo.Origin = FileOrigin.Log;
+ stream = _logStream;
+ }
+ else
+ {
+ pageInfo.Origin = FileOrigin.Data;
+ pageInfo.Position = BasePage.GetPagePosition(pageID);
+
+ stream = _dataStream;
+ }
+
+ stream.Position = pageInfo.Position;
+
+ read = stream.Read(pageBuffer.Array, pageBuffer.Offset, pageBuffer.Count);
+
+ ENSURE(read == PAGE_SIZE, $"Page position {pageInfo.Position} read only {read} bytes (instead of {PAGE_SIZE})");
+
+ var page = new BasePage(pageBuffer);
+
+ ENSURE(page.PageID == pageID, $"Expected to read pageID {pageID} but header contains pageID {page.PageID}");
+
+ return page;
}
- else
+ catch (Exception ex)
{
- origin = FileOrigin.Data;
- position = BasePage.GetPagePosition(pageID);
-
- _dataStream.Position = position;
- _dataStream.Read(pageBuffer.Array, pageBuffer.Offset, pageBuffer.Count);
+ return new Result<BasePage>(null, ex);
}
+ }
- var page = new BasePage(pageBuffer);
+ /// <summary>
+ /// Handle any error without throwing during the process. If the exception must stop the process (IOException), rethrow it.
+ /// Add errors to the log and continue reading the data file
+ /// </summary>
+ private void HandleError(Exception ex, PageInfo pageInfo)
+ {
+ _errors.Add(new FileReaderError
+ {
+ Position = pageInfo.Position,
+ Origin = pageInfo.Origin,
+ PageID = pageInfo.PageID,
+ PageType = pageInfo.PageType,
+ Message = ex.Message,
+ Exception = ex,
+ });
+
+ if (ex is IOException)
+ {
+ // HResult error codes for IOException
+ // https://learn.microsoft.com/pt-br/windows/win32/debug/system-error-codes--0-499-
- return page;
+ throw ex;
+ }
}
+ private void HandleError(string message, PageInfo pageInfo) => this.HandleError(new Exception(message), pageInfo);
+
protected virtual void Dispose(bool disposing)
{
- if (!_disposedValue)
+ if (!_disposed)
{
_dataStream?.Dispose();
_logStream?.Dispose();
- _disposedValue = true;
+ _disposed = true;
}
}
diff --git a/LiteDB/Engine/ILiteEngine.cs b/LiteDB/Engine/ILiteEngine.cs
index aa930282f..00848de2c 100644
--- a/LiteDB/Engine/ILiteEngine.cs
+++ b/LiteDB/Engine/ILiteEngine.cs
@@ -5,11 +5,6 @@ namespace LiteDB.Engine
{
public interface ILiteEngine : IDisposable
{
- bool IsOpen { get; }
-
- bool Open();
- bool Close();
-
int Checkpoint();
long Rebuild(RebuildOptions settings);
diff --git a/LiteDB/Engine/LiteEngine.cs b/LiteDB/Engine/LiteEngine.cs
index 0bd8e8650..f451c3645 100644
--- a/LiteDB/Engine/LiteEngine.cs
+++ b/LiteDB/Engine/LiteEngine.cs
@@ -1,4 +1,6 @@
-using System;
+using LiteDB.Utils;
+
+using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
@@ -34,9 +36,9 @@ public partial class LiteEngine : ILiteEngine
private readonly EngineSettings _settings;
/// <summary>
- /// Indicate that database is open or not (auto-open in Ctor)
+ /// Indicates this instance was already disposed and no more actions can be performed
/// </summary>
- private bool _isOpen = false;
+ private bool _disposed = false;
/// <summary>
/// All system read-only collections for get metadata database information
@@ -50,8 +52,6 @@ public partial class LiteEngine : ILiteEngine
#endregion
- public bool IsOpen => _isOpen;
-
#region Ctor
/// <summary>
@@ -84,11 +84,8 @@ public LiteEngine(EngineSettings settings)
#region Open & Close
- public bool Open()
+ private bool Open()
{
- // do not re-open database
- if (_isOpen) return false;
-
LOG($"start initializing{(_settings.ReadOnly ? " (readonly)" : "")}", "ENGINE");
_systemCollections = new Dictionary<string, SystemCollection>(StringComparer.OrdinalIgnoreCase);
@@ -158,8 +155,6 @@ public bool Open()
LOG("initialization completed", "ENGINE");
- _isOpen = true;
-
return true;
}
catch (Exception ex)
@@ -179,36 +174,33 @@ public bool Open()
/// - Close disks
/// - Clean variables
/// </summary>
- public bool Close()
+ public List<Exception> Close()
{
- if (_isOpen == false)
-
- _isOpen = false;
-
- try
- {
- // stop running all transactions
- _monitor?.Dispose();
+ if (_disposed) return new List<Exception>();
- // do a soft checkpoint (only if exclusive lock is possible)
- if (_header?.Pragmas.Checkpoint > 0) _walIndex?.TryCheckpoint();
+ _disposed = true;
- // close all disk streams (and delete log if empty)
- _disk?.Dispose();
+ var tc = new TryCatch();
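+ // collect exceptions from each shutdown step instead of failing on the first one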
- // delete sort temp file
- _sortDisk?.Dispose();
+ // stop running all transactions
+ tc.Catch(() => _monitor?.Dispose());
- // dispose lockers
- _locker?.Dispose();
- }
- finally
+ if (_header?.Pragmas.Checkpoint > 0)
{
- this.CleanServiceFields();
+ // do a soft checkpoint (only if exclusive lock is possible)
+ tc.Catch(() => _walIndex?.TryCheckpoint());
}
- return true;
+ // close all disk streams (and delete log if empty)
+ tc.Catch(() => _disk?.Dispose());
+
+ // delete sort temp file
+ tc.Catch(() => _sortDisk?.Dispose());
+ // dispose lockers
+ tc.Catch(() => _locker?.Dispose());
+
+ return tc.Exceptions;
}
/// <summary>
@@ -219,48 +211,34 @@ public bool Close()
/// - Checks Exception type for DataCorruped to auto rebuild on open
/// - Clean variables
/// </summary>
- internal void Close(Exception ex)
+ internal List<Exception> Close(Exception ex)
{
- _isOpen = false;
+ if (_disposed) return new List<Exception>();
+
+ _disposed = true;
+
+ var tc = new TryCatch(ex);
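+ // seed the exception list with the failure that triggered this close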
// stop running queue to write
- _disk?.Queue.Abort();
+ tc.Catch(() => _disk?.Queue.Abort());
- try
- {
- // close disks streams
- _disk?.Dispose();
+ // close disks streams
+ tc.Catch(() => _disk?.Dispose());
- _monitor?.Dispose();
+ tc.Catch(() => _monitor?.Dispose());
- _sortDisk?.Dispose();
+ tc.Catch(() => _sortDisk?.Dispose());
- _locker.Dispose();
- }
- finally
- {
- if (ex is LiteException liteEx && liteEx.ErrorCode == LiteException.INVALID_DATAFILE_STATE)
- {
- // mark byte = 1 in HeaderPage.P_INVALID_DATAFILE_STATE - will open in auto-rebuild
- // this method will throw no errors
- new RebuildService(_settings).MarkAsInvalidState();
- }
+ tc.Catch(() => _locker.Dispose());
- this.CleanServiceFields();
+ if (tc.InvalidDatafileState)
+ {
+ // mark byte = 1 in HeaderPage.P_INVALID_DATAFILE_STATE - will open in auto-rebuild
+ // this method will throw no errors
+ tc.Catch(() => new RebuildService(_settings).MarkAsInvalidState());
}
- }
- internal void CleanServiceFields()
- {
- // clear all field member (to work if open method)
- _disk = null;
- _header = null;
- _locker = null;
- _walIndex = null;
- _sortDisk = null;
- _monitor = null;
- _systemCollections = null;
- _sequences = null;
+ return tc.Exceptions;
}
#endregion
diff --git a/LiteDB/Engine/Pages/HeaderPage.cs b/LiteDB/Engine/Pages/HeaderPage.cs
index ed8407673..30c8c7bc9 100644
--- a/LiteDB/Engine/Pages/HeaderPage.cs
+++ b/LiteDB/Engine/Pages/HeaderPage.cs
@@ -131,7 +131,7 @@ private void LoadPage()
using (var r = new BufferReader(new[] { area }, false))
{
- _collections = r.ReadDocument();
+ _collections = r.ReadDocument().GetValue();
}
_isCollectionsChanged = false;
diff --git a/LiteDB/Engine/Query/Lookup/DatafileLookup.cs b/LiteDB/Engine/Query/Lookup/DatafileLookup.cs
index 49fcb4b8d..62f3958e9 100644
--- a/LiteDB/Engine/Query/Lookup/DatafileLookup.cs
+++ b/LiteDB/Engine/Query/Lookup/DatafileLookup.cs
@@ -30,7 +30,7 @@ public virtual BsonDocument Load(PageAddress rawId)
{
using (var reader = new BufferReader(_data.Read(rawId), _utcDate))
{
- var doc = reader.ReadDocument(_fields);
+ var doc = reader.ReadDocument(_fields).GetValue();
doc.RawId = rawId;
diff --git a/LiteDB/Engine/Services/RebuildService.cs b/LiteDB/Engine/Services/RebuildService.cs
index f9fe92afb..64d4ee91f 100644
--- a/LiteDB/Engine/Services/RebuildService.cs
+++ b/LiteDB/Engine/Services/RebuildService.cs
@@ -38,57 +38,48 @@ public long Rebuild(RebuildOptions options)
var tempFilename = FileHelper.GetSufixFile(_settings.Filename, "-temp", true);
// open file reader
- var reader = _fileVersion == 7 ?
- new FileReaderV7(_settings) :
- (IFileReader)new FileReaderV8(_settings, options.Errors);
-
- // open file reader and ready to import to new temp engine instance
- reader.Open();
-
- // open new engine to recive all data readed from FileReader
- using (var engine = new LiteEngine(new EngineSettings
- {
- Filename = tempFilename,
- Collation = options.Collation,
- Password = options.Password
- }))
+ using (var reader = _fileVersion == 7 ?
+ new FileReaderV7(_settings) :
+ (IFileReader)new FileReaderV8(_settings, options.Errors))
{
- // copy all database to new Log file with NO checkpoint during all rebuild
- engine.Pragma(Pragmas.CHECKPOINT, 0);
+ // open file reader, ready to import into the new temp engine instance
+ reader.Open();
- // rebuild all content from reader into new engine
- engine.RebuildContent(reader);
-
- // insert error report
- if (options.IncludeErrorReport && options.Errors.Count > 0)
+ // open new engine to receive all data read from the FileReader
+ using (var engine = new LiteEngine(new EngineSettings
+ {
+ Filename = tempFilename,
+ Collation = options.Collation,
+ Password = options.Password
+ }))
{
- // a random buildId to group by event
- var buildId = Guid.NewGuid().ToString("d").ToLower().Substring(6);
+ // copy all database to new Log file with NO checkpoint during all rebuild
+ engine.Pragma(Pragmas.CHECKPOINT, 0);
- var docs = options.Errors.Select(x => new BsonDocument
+ // rebuild all content from reader into new engine
+ engine.RebuildContent(reader);
+
+ // insert error report
+ if (options.IncludeErrorReport && options.Errors.Count > 0)
{
- ["buildId"] = buildId,
- ["created"] = x.Created,
- ["pageID"] = (int)x.PageID,
- ["code"] = x.Code,
- ["field"] = x.Field,
- ["message"] = x.Message,
- });
-
- engine.Insert("_rebuild_errors", docs, BsonAutoId.Int32);
- }
+ var report = options.GetErrorReport();
+
+ engine.Insert("_rebuild_errors", report, BsonAutoId.Int32);
+ }
- // update pragmas
- var pragmas = reader.GetPragmas();
+ // update pragmas
+ var pragmas = reader.GetPragmas();
- engine.Pragma(Pragmas.CHECKPOINT, pragmas[Pragmas.CHECKPOINT]);
- engine.Pragma(Pragmas.TIMEOUT, pragmas[Pragmas.TIMEOUT]);
- engine.Pragma(Pragmas.LIMIT_SIZE, pragmas[Pragmas.LIMIT_SIZE]);
- engine.Pragma(Pragmas.UTC_DATE, pragmas[Pragmas.UTC_DATE]);
- engine.Pragma(Pragmas.USER_VERSION, pragmas[Pragmas.USER_VERSION]);
+ engine.Pragma(Pragmas.CHECKPOINT, pragmas[Pragmas.CHECKPOINT]);
+ engine.Pragma(Pragmas.TIMEOUT, pragmas[Pragmas.TIMEOUT]);
+ engine.Pragma(Pragmas.LIMIT_SIZE, pragmas[Pragmas.LIMIT_SIZE]);
+ engine.Pragma(Pragmas.UTC_DATE, pragmas[Pragmas.UTC_DATE]);
+ engine.Pragma(Pragmas.USER_VERSION, pragmas[Pragmas.USER_VERSION]);
+
+ // after rebuild, copy log bytes into data file
+ engine.Checkpoint();
+ }
- // after rebuild, copy log bytes into data file
- engine.Checkpoint();
}
// rename source filename to backup name
diff --git a/LiteDB/Engine/Structures/RebuildOptions.cs b/LiteDB/Engine/Structures/RebuildOptions.cs
index d52962a2e..cc48e9c0a 100644
--- a/LiteDB/Engine/Structures/RebuildOptions.cs
+++ b/LiteDB/Engine/Structures/RebuildOptions.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.Linq;
using System.Text;
using static LiteDB.Constants;
@@ -10,6 +11,11 @@ namespace LiteDB.Engine
/// </summary>
public class RebuildOptions
{
+ /// <summary>
+ /// A random BuildID identifier
+ /// </summary>
+ private string _buildId = Guid.NewGuid().ToString("d").ToLower().Substring(6);
+
/// <summary>
/// Rebuild database with a new password
///
@@ -29,6 +35,33 @@ public class RebuildOptions
/// <summary>
/// After run rebuild process, get a error report (empty if no error detected)
/// </summary>
- public IList<FileReaderError> Errors { get; } = new List<FileReaderError>();
+ internal IList<FileReaderError> Errors { get; } = new List<FileReaderError>();
+
+ /// <summary>
+ /// Get a list of errors collected during the rebuild process
+ /// </summary>
+ public IEnumerable<BsonDocument> GetErrorReport()
+ {
+ var docs = this.Errors.Select(x => new BsonDocument
+ {
+ ["buildId"] = _buildId,
+ ["created"] = x.Created,
+ ["pageID"] = (int)x.PageID,
+ ["positionID"] = (long)x.Position,
+ ["origin"] = x.Origin.ToString(),
+ ["pageType"] = x.PageType.ToString(),
+ ["message"] = x.Message,
+ ["exception"] = new BsonDocument
+ {
+ ["code"] = (x.Exception is LiteException lex ? lex.ErrorCode : -1),
+ ["hresult"] = x.Exception.HResult,
+ ["type"] = x.Exception.GetType().FullName,
+ ["inner"] = x.Exception.InnerException?.Message,
+ ["stacktrace"] = x.Exception.StackTrace
+ },
+ });
+
+ return docs;
+ }
}
}
\ No newline at end of file
diff --git a/LiteDB/Utils/Constants.cs b/LiteDB/Utils/Constants.cs
index 614001f31..9a6b2b03a 100644
--- a/LiteDB/Utils/Constants.cs
+++ b/LiteDB/Utils/Constants.cs
@@ -6,6 +6,7 @@
#if DEBUG
[assembly: InternalsVisibleTo("LiteDB.Tests")]
+[assembly: InternalsVisibleTo("ConsoleApp1")]
#endif
namespace LiteDB
diff --git a/LiteDB/Utils/Extensions/BufferSliceExtensions.cs b/LiteDB/Utils/Extensions/BufferSliceExtensions.cs
index f011e038f..3e7422d01 100644
--- a/LiteDB/Utils/Extensions/BufferSliceExtensions.cs
+++ b/LiteDB/Utils/Extensions/BufferSliceExtensions.cs
@@ -114,7 +114,7 @@ public static string ReadCString(this BufferSlice buffer, int offset, out int le
}
}
- return Encoding.UTF8.GetString(buffer.Array, buffer.Offset + offset, length);
+ return Encoding.UTF8.GetString(buffer.Array, buffer.Offset + offset, length - 1);
}
///
@@ -142,13 +142,13 @@ public static BsonValue ReadIndexKey(this BufferSlice buffer, int offset)
using (var r = new BufferReader(buffer))
{
r.Skip(offset); // skip first byte for value.Type
- return r.ReadDocument();
+ return r.ReadDocument().GetValue();
}
case BsonType.Array:
using (var r = new BufferReader(buffer))
{
r.Skip(offset); // skip first byte for value.Type
- return r.ReadArray();
+ return r.ReadArray().GetValue();
}
case BsonType.Binary:
diff --git a/LiteDB/Utils/LiteException.cs b/LiteDB/Utils/LiteException.cs
index a28db9542..5270cde9f 100644
--- a/LiteDB/Utils/LiteException.cs
+++ b/LiteDB/Utils/LiteException.cs
@@ -82,6 +82,11 @@ internal LiteException (int code, Exception inner, string message, params object
this.ErrorCode = code;
}
+ /// <summary>
+ /// Critical errors should stop the engine and release data files and all memory allocations
+ /// </summary>
+ public bool IsCritical => this.ErrorCode >= 900;
+
#endregion
#region Method Errors
@@ -332,7 +337,7 @@ internal static LiteException InvalidDatafileState(string message)
{
return new LiteException(INVALID_DATAFILE_STATE, "LiteDB found inconsistency in data or memory pages. " +
"Your database may be corrupted. On the next opening a rebuild process will be executed. " +
- "Add parameter AutoRebuild=true in LiteDatabase initialization. " +
+ "Add parameter AutoRebuild=true in LiteDatabase connection string initialization. " +
"Inner message:" + message);
}
#endregion
diff --git a/LiteDB/Utils/Result.cs b/LiteDB/Utils/Result.cs
new file mode 100644
index 000000000..fd6425828
--- /dev/null
+++ b/LiteDB/Utils/Result.cs
@@ -0,0 +1,43 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Text;
+using static LiteDB.Constants;
+
+namespace LiteDB
+{
+ /// <summary>
+ /// Implement a generic result structure with a value and an exception. The value can be partial (like a partially-read BsonDocument/Array)
+ /// </summary>
+ internal struct Result<T>
+ where T : class
+ {
+ public T Value;
+ public Exception Exception;
+
+ public bool Ok => this.Exception == null;
+ public bool Fail => this.Exception != null;
+
+ /// <summary>
+ /// Get the result value, or throw the exception if there was any error while reading
+ /// </summary>
+ public T GetValue() => this.Ok ? this.Value : throw this.Exception;
+
+ public Result(T value, Exception ex = null)
+ {
+ this.Value = value;
+ this.Exception = ex;
+ }
+
+
+ public static implicit operator T(Result<T> value)
+ {
+ return value.Value;
+ }
+
+ public static implicit operator Result<T>(T value)
+ {
+ return new Result<T>(value, null);
+ }
+ }
+}
\ No newline at end of file
diff --git a/LiteDB/Utils/TryCatch.cs b/LiteDB/Utils/TryCatch.cs
new file mode 100644
index 000000000..30bdefa49
--- /dev/null
+++ b/LiteDB/Utils/TryCatch.cs
@@ -0,0 +1,39 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace LiteDB.Utils
+{
+ internal class TryCatch
+ {
+ public readonly List<Exception> Exceptions = new List<Exception>();
+
+ public TryCatch()
+ {
+ }
+
+ public TryCatch(Exception initial)
+ {
+ this.Exceptions.Add(initial);
+ }
+
+ public bool InvalidDatafileState => this.Exceptions.Any(ex =>
+ ex is LiteException liteEx &&
+ liteEx.ErrorCode == LiteException.INVALID_DATAFILE_STATE);
+
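+ // run an action, recording any exception instead of propagating it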
+ public void Catch(Action action)
+ {
+ try
+ {
+ action();
+ }
+ catch (Exception ex)
+ {
+ this.Exceptions.Add(ex);
+ }
+ }
+ }
+}