diff --git a/src/SeqCli/Forwarder/Filesystem/StoreDirectory.cs b/src/SeqCli/Forwarder/Filesystem/StoreDirectory.cs
index f6f34d36..bc5d018f 100644
--- a/src/SeqCli/Forwarder/Filesystem/StoreDirectory.cs
+++ b/src/SeqCli/Forwarder/Filesystem/StoreDirectory.cs
@@ -34,6 +34,15 @@ protected virtual (string, StoreFile) CreateTemporary()
var tmpName = $"rc{Guid.NewGuid():N}.tmp";
return (tmpName, Create(tmpName));
}
+
+ /// <summary>
+ /// Create a new file with the given contents.
+ /// </summary>
+ public virtual StoreFile Create(string name, Span<byte> contents)
+ {
+ Create(name);
+ return ReplaceContents(name, contents);
+ }
///
/// Delete a file with the given name, returning whether the file was deleted.
@@ -43,7 +52,7 @@ protected virtual (string, StoreFile) CreateTemporary()
/// <summary>
/// Atomically replace the contents of one file with another, creating it if it doesn't exist and deleting the other.
/// </summary>
- public abstract StoreFile Replace(string toReplace, string replaceWith);
+ public abstract StoreFile Replace(string destinationPath, string sourcePath);
///
/// Atomically replace the contents of a file.
@@ -73,10 +82,18 @@ public virtual StoreFile ReplaceContents(string name, Span<byte> contents, bool
}
/// <summary>
- /// List all files in unspecified order.
+ /// List all files matching the given predicate in unspecified order.
/// </summary>
public abstract IEnumerable<(string Name, StoreFile File)> List(Func<string, bool> predicate);
+ /// <summary>
+ /// List all files in unspecified order.
+ /// </summary>
+ public virtual IEnumerable<(string Name, StoreFile File)> ListAll()
+ {
+ return List(_ => true);
+ }
+
/// <summary>
/// Try get a file by name.
/// </summary>
diff --git a/src/SeqCli/Forwarder/Filesystem/StoreFile.cs b/src/SeqCli/Forwarder/Filesystem/StoreFile.cs
index 1beed35f..5a189d5a 100644
--- a/src/SeqCli/Forwarder/Filesystem/StoreFile.cs
+++ b/src/SeqCli/Forwarder/Filesystem/StoreFile.cs
@@ -43,6 +43,23 @@ public virtual long CopyContentsTo(Span<byte> buffer)
return reader.CopyTo(buffer);
}
+ /// <summary>
+ /// Append the contents of the supplied buffer to the end of the file.
+ /// </summary>
+ public virtual void Append(Span<byte> buffer)
+ {
+ if (!TryOpenAppend(out var opened))
+ {
+ throw new Exception("Failed to open the file for appending");
+ }
+
+ using var appender = opened;
+
+ appender.Append(buffer);
+ appender.Commit();
+ appender.Sync();
+ }
+
/// <summary>
/// Try open a reader to the file.
/// </summary>
diff --git a/src/SeqCli/Forwarder/Filesystem/System/SystemStoreDirectory.cs b/src/SeqCli/Forwarder/Filesystem/System/SystemStoreDirectory.cs
index 695666d0..85c7e058 100644
--- a/src/SeqCli/Forwarder/Filesystem/System/SystemStoreDirectory.cs
+++ b/src/SeqCli/Forwarder/Filesystem/System/SystemStoreDirectory.cs
@@ -102,11 +102,11 @@ public override bool TryDelete(string name)
}
}
- public override SystemStoreFile Replace(string toReplace, string replaceWith)
+ public override SystemStoreFile Replace(string destinationPath, string sourcePath)
{
- var filePath = Path.Combine(_directoryPath, toReplace);
+ var filePath = Path.Combine(_directoryPath, destinationPath);
- File.Replace(Path.Combine(_directoryPath, replaceWith), filePath, null);
+ File.Replace(Path.Combine(_directoryPath, sourcePath), filePath, null);
return new SystemStoreFile(filePath);
}
diff --git a/src/SeqCli/Forwarder/Storage/BufferReader.cs b/src/SeqCli/Forwarder/Storage/BufferReader.cs
index 7a4721c9..8c902905 100644
--- a/src/SeqCli/Forwarder/Storage/BufferReader.cs
+++ b/src/SeqCli/Forwarder/Storage/BufferReader.cs
@@ -71,80 +71,107 @@ Scan through the offending chunk until a newline delimiter is found.
2. After discarding, attempt to fill a buffer with as much data as possible
from the underlying chunks.
*/
+
+ var chunkIndex = 0;
if (_discardingHead != null)
{
+ // We're discarding an oversize payload
+ var discardingRentedArray = ArrayPool<byte>.Shared.Rent(maxSize);
// NOTE: We don't use `maxSize` here, because we're discarding these bytes
// so it doesn't matter what size the target array is
var discardingBatchBuffer = discardingRentedArray.AsSpan();
- while (_discardingHead != null)
+ while (_discardingHead != null && chunkIndex < _sortedChunks.Count)
{
- var chunk = _sortedChunks[0];
+ var chunk = _sortedChunks[chunkIndex];
- // If the chunk has changed (it may have been deleted externally)
+ // If the first chunk has changed (it may have been deleted externally)
// then stop discarding
if (chunk.Name.Id != _discardingHead.Value.ChunkId)
{
_discardingHead = null;
- ArrayPool.Shared.Return(discardingRentedArray);
break;
}
- var chunkHead = Extents(chunk);
+ // Try read to the end of the chunk
+ //
+ // If reading the chunk length fails then advance over it
+ if (!chunk.File.TryGetLength(out var length))
+ {
+ chunkIndex += 1;
+
+ continue;
+ }
+
+ var chunkHead = new BufferReaderChunkExtents(Math.Min(length.Value, _discardingHead.Value.Offset), length.Value);
// Attempt to fill the buffer with data from the underlying chunk
+ //
+ // If reading from the chunk fails then advance over it
if (!TryFillChunk(chunk,
- chunkHead with { CommitHead = _discardingHead.Value.Offset },
+ chunkHead,
discardingBatchBuffer,
- out var fill))
+ out var filled))
{
- // If attempting to read from the chunk fails then remove it and carry on
- // This is also done below in the regular read-loop if reading fails
- _sortedChunks.RemoveAt(0);
- _discardingHead = null;
+ chunkIndex += 1;
- ArrayPool.Shared.Return(discardingRentedArray);
- break;
+ continue;
}
// Scan forwards for the next newline
- var firstNewlineIndex = discardingBatchBuffer[..fill.Value].IndexOf((byte)'\n');
-
- // If a newline was found then advance the reader to it and stop discarding
- if (firstNewlineIndex >= 0) fill = firstNewlineIndex + 1;
+ var firstNewlineIndex = discardingBatchBuffer[..filled.Value].IndexOf((byte)'\n');
+ if (firstNewlineIndex >= 0) filled = firstNewlineIndex + 1;
_discardingHead = _discardingHead.Value with
{
- Offset = _discardingHead.Value.Offset + fill.Value
+ Offset = _discardingHead.Value.Offset + filled.Value
};
_readHead = _discardingHead.Value;
- var isChunkFinished = _discardingHead.Value.Offset == chunkHead.WriteHead;
-
- // If the chunk is finished or a newline is found then stop discarding
- if (firstNewlineIndex >= 0 || (isChunkFinished && _sortedChunks.Count > 1))
+ // If a newline was found then advance the reader to it and stop discarding
+ if (firstNewlineIndex >= 0)
{
_discardingHead = null;
- ArrayPool.Shared.Return(discardingRentedArray);
break;
}
+
+ var isChunkFinished = chunkHead.CommitHead + filled == chunkHead.WriteHead;
+
+ // If we've discarded to the end of the chunk then update our state from the disk and return
+ //
+ // The next time we attempt to fill a chunk we'll resume from this point.
+ if (isChunkFinished)
+ {
+ // If there's no way new data can arrive to complete this event then advance over it.
+ // If the chunk is the last one then it's considered actively writable, and so we
+ // presume we're seeing a torn write here.
+ //
+ // A future sync from the files on disk will delete it.
+ if (_sortedChunks.Count > 1)
+ {
+ _discardingHead = null;
- // If there's more data in the chunk to read then loop back through
- if (!isChunkFinished) continue;
-
- // If the chunk is finished but a newline wasn't found then refresh
- // our set of chunks and loop back through
- ReadChunks();
+ break;
+ }
+
+ // There's only a single chunk, update our state from the disk in case the writer
+ // has moved on to another chunk and return. We may end up coming back later and
+ // reading more to discard.
+ ReadChunks();
- ArrayPool.Shared.Return(discardingRentedArray);
- batch = null;
- return false;
+ ArrayPool.Shared.Return(discardingRentedArray);
+ batch = null;
+ return false;
+ }
}
+
+ ReadChunks();
+
+ ArrayPool.Shared.Return(discardingRentedArray);
}
// Fill a buffer with newline-delimited values
@@ -154,45 +181,69 @@ from the underlying chunks.
var batchLength = 0;
BufferPosition? batchHead = null;
- var chunkIndex = 0;
// Try fill the buffer with as much data as possible
// by walking over all chunks
while (chunkIndex < _sortedChunks.Count)
{
var chunk = _sortedChunks[chunkIndex];
- var chunkHead = Extents(chunk);
+
+ BufferReaderChunkExtents chunkHead;
+ if (chunk.Name.Id == _readHead.ChunkId)
+ {
+ // The chunk is the one we're currently reading; resume from where we left off
+ // If the file was truncated externally then we'll treat it as complete
+ chunkHead = chunk.File.TryGetLength(out var length)
+ ? new BufferReaderChunkExtents(Math.Min(_readHead.Offset, length.Value), length.Value)
+ : new BufferReaderChunkExtents(_readHead.Offset, _readHead.Offset);
+ }
+ else
+ {
+ // The chunk is not the one we've been reading; start from the beginning
+ chunk.File.TryGetLength(out var length);
+ chunkHead = new BufferReaderChunkExtents(0, length ?? 0);
+ }
- if (!TryFillChunk(chunk, chunkHead, batchBuffer[batchLength..], out var fill))
+ if (!TryFillChunk(chunk, chunkHead, batchBuffer[batchLength..], out var filled))
{
- // If we can't read from this chunk anymore then remove it and continue
- _sortedChunks.RemoveAt(chunkIndex);
+ // If we can't read from this chunk anymore then step over it
+ chunkIndex += 1;
continue;
}
- var isBufferFull = batchLength + fill == maxSize;
- var isChunkFinished = fill == chunkHead.WriteHead;
+ var isBufferFull = batchLength + filled == maxSize;
+ var isChunkFinished = chunkHead.CommitHead + filled == chunkHead.WriteHead;
- // If either the buffer has been filled or we've reached the end of a chunk
- // then scan to the last newline
+ // If either the buffer has been filled or we've reached the end of the chunk
+ // then scan backwards to the last newline delimiter
if (isBufferFull || isChunkFinished)
{
- // If the chunk is finished then we expect this to immediately find a trailing newline
+ // If the chunk is valid and finished then we expect this to immediately find a trailing newline
// NOTE: `Span.LastIndexOf` and similar methods are vectorized
- var lastNewlineIndex = batchBuffer[batchLength..(batchLength + fill.Value)].LastIndexOf((byte)'\n');
+ var lastNewlineIndex = batchBuffer[batchLength..(batchLength + filled.Value)].LastIndexOf((byte)'\n');
if (lastNewlineIndex == -1)
{
- // If this isn't the last chunk then discard the trailing data and move on
+ // The data we wrote didn't contain any newline delimiters
+
+ // If there's no way new data can arrive to complete this event then advance over it.
+ // If the chunk is the last one then it's considered actively writable, and so we
+ // presume we're seeing a torn write here.
+ //
+ // A subsequent attempt to fill will overwrite the incomplete data in it, and
+ // a future sync from the files on disk will delete it
if (isChunkFinished && chunkIndex < _sortedChunks.Count)
{
chunkIndex += 1;
continue;
}
- // If this is the first chunk then we've hit an oversize payload
+ // If we're looking at the first chunk then start discarding
+ //
+ // We'll hit this point if we happen to start from an oversized payload, or if our last attempt
+ // to fill a batch advanced up to an oversize event
if (chunkIndex == 0)
{
- _discardingHead = new BufferPosition(chunk.Name.Id, chunkHead.CommitHead + fill.Value);
+ _discardingHead = new BufferPosition(chunk.Name.Id, chunkHead.CommitHead + filled.Value);
// Ensures we don't attempt to yield the data we've read
batchHead = null;
@@ -202,11 +253,12 @@ from the underlying chunks.
break;
}
- fill = lastNewlineIndex + 1;
+ // Only consider the read data up to the last newline
+ filled = lastNewlineIndex + 1;
}
- batchLength += fill.Value;
- batchHead = new BufferPosition(chunk.Name.Id, chunkHead.CommitHead + fill.Value);
+ batchLength += filled.Value;
+ batchHead = new BufferPosition(chunk.Name.Id, chunkHead.CommitHead + filled.Value);
chunkIndex += 1;
}
@@ -245,6 +297,7 @@ public void AdvanceTo(BufferPosition newReaderHead)
// The remainder of the chunk is being skipped
if (chunk.Name.Id < newReaderHead.ChunkId)
{
+ chunk.Dispose();
_storeDirectory.TryDelete(chunk.Name.ToString());
}
else
@@ -261,17 +314,12 @@ public void AdvanceTo(BufferPosition newReaderHead)
_sortedChunks.RemoveRange(0, removeLength);
}
- BufferReaderChunkExtents Extents(BufferReaderChunk chunk)
- {
- if (chunk.Name.Id == _readHead.ChunkId)
- return chunk.Chunk.TryGetLength(out var writeHead)
- ? new BufferReaderChunkExtents(Math.Min(_readHead.Offset, writeHead.Value), writeHead.Value)
- : new BufferReaderChunkExtents(_readHead.Offset, _readHead.Offset);
-
- chunk.Chunk.TryGetLength(out var length);
- return new BufferReaderChunkExtents(0, length ?? 0);
- }
-
+ /// <summary>
+ /// Read the current state of the store from files on disk.
+ /// </summary>
+ /// <remarks>
+ /// This method will delete any files it finds before the current read head.
+ /// </remarks>
void ReadChunks()
{
List chunks = [];
diff --git a/src/SeqCli/Forwarder/Storage/BufferReaderChunk.cs b/src/SeqCli/Forwarder/Storage/BufferReaderChunk.cs
index d85cb20e..d7cf5168 100644
--- a/src/SeqCli/Forwarder/Storage/BufferReaderChunk.cs
+++ b/src/SeqCli/Forwarder/Storage/BufferReaderChunk.cs
@@ -22,14 +22,14 @@ namespace SeqCli.Forwarder.Storage;
///
class BufferReaderChunk : IDisposable
{
- public BufferReaderChunk(ChunkName name, StoreFile chunk)
+ public BufferReaderChunk(ChunkName name, StoreFile file)
{
Name = name;
- Chunk = chunk;
+ File = file;
}
public ChunkName Name { get; }
- public StoreFile Chunk { get; }
+ public StoreFile File { get; }
(long, StoreFileReader)? _reader;
@@ -53,7 +53,7 @@ public bool TryCopyTo(Span buffer, BufferReaderChunkExtents chunkExtents,
if (_reader == null)
{
- if (!Chunk.TryOpenRead(chunkExtents.WriteHead, out var reader)) return false;
+ if (!File.TryOpenRead(chunkExtents.WriteHead, out var reader)) return false;
_reader = (chunkExtents.WriteHead, reader);
}
diff --git a/src/SeqCli/Forwarder/Storage/BufferReaderChunkExtents.cs b/src/SeqCli/Forwarder/Storage/BufferReaderChunkExtents.cs
index 5cc37a69..5a101b2a 100644
--- a/src/SeqCli/Forwarder/Storage/BufferReaderChunkExtents.cs
+++ b/src/SeqCli/Forwarder/Storage/BufferReaderChunkExtents.cs
@@ -12,12 +12,35 @@
// See the License for the specific language governing permissions and
// limitations under the License.
+using System;
+
namespace SeqCli.Forwarder.Storage;
/// <summary>
/// The current read and write positions in a <see cref="BufferReaderChunk"/>.
/// </summary>
-readonly record struct BufferReaderChunkExtents(long CommitHead, long WriteHead)
+readonly record struct BufferReaderChunkExtents
{
- public long Unadvanced => WriteHead - CommitHead;
+ public BufferReaderChunkExtents(long commitHead, long writeHead)
+ {
+ if (commitHead > writeHead)
+ {
+ throw new ArgumentOutOfRangeException(nameof(CommitHead), "The commit head cannot be greater than the write head");
+ }
+
+ CommitHead = commitHead;
+ WriteHead = writeHead;
+ }
+
+ /// <summary>
+ /// The inclusive start of the range.
+ /// </summary>
+ public long CommitHead { get; }
+
+ /// <summary>
+ /// The exclusive end of the range.
+ /// </summary>
+ public long WriteHead { get; }
+
+ public long Unadvanced => Math.Max(0, WriteHead - CommitHead);
}
diff --git a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreDirectory.cs b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreDirectory.cs
index 00b1762d..4cf94086 100644
--- a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreDirectory.cs
+++ b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreDirectory.cs
@@ -20,7 +20,7 @@ public override InMemoryStoreFile Create(string name)
return _files[name];
}
- public InMemoryStoreFile Create(string name, Span<byte> contents)
+ public override InMemoryStoreFile Create(string name, Span<byte> contents)
{
var file = Create(name);
file.Append(contents);
@@ -30,15 +30,26 @@ public InMemoryStoreFile Create(string name, Span contents)
public override bool TryDelete(string name)
{
+ var existing = _files[name];
+
+ if (!existing.CanDelete())
+ {
+ throw new InvalidOperationException($"Cannot delete {name} with active handles");
+ }
+
return _files.Remove(name);
}
- public override InMemoryStoreFile Replace(string toReplace, string replaceWith)
+ public override InMemoryStoreFile Replace(string destinationPath, string sourcePath)
{
- _files[toReplace] = _files[replaceWith];
- _files.Remove(replaceWith);
+ _files[destinationPath] = _files[sourcePath];
+
+ if (!TryDelete(sourcePath))
+ {
+ throw new InvalidOperationException($"Failed to replace {destinationPath} with {sourcePath}");
+ }
- return _files[toReplace];
+ return _files[destinationPath];
}
public override IEnumerable<(string Name, StoreFile File)> List(Func<string, bool> predicate)
diff --git a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFile.cs b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFile.cs
index 26be3beb..9d44b84f 100644
--- a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFile.cs
+++ b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFile.cs
@@ -10,13 +10,16 @@ class InMemoryStoreFile : StoreFile
{
public byte[] Contents { get; private set; } = Array.Empty<byte>();
+ int _activeReaders;
+ int _activeWriters;
+
public override bool TryGetLength([NotNullWhen(true)] out long? length)
{
length = Contents.Length;
return true;
}
- public void Append(Span<byte> incoming)
+ public override void Append(Span<byte> incoming)
{
var newContents = new byte[Contents.Length + incoming.Length];
@@ -29,12 +32,36 @@ public void Append(Span incoming)
public override bool TryOpenRead(long length, [NotNullWhen(true)] out StoreFileReader? reader)
{
reader = new InMemoryStoreFileReader(this, (int)length);
+ _activeReaders++;
+
return true;
}
+ internal void CloseReader()
+ {
+ _activeReaders--;
+ }
+
public override bool TryOpenAppend([NotNullWhen(true)] out StoreFileAppender? appender)
{
+ if (_activeWriters != 0)
+ {
+ throw new InvalidOperationException("Attempt to open multiple appenders to the same file");
+ }
+
appender = new InMemoryStoreFileAppender(this);
+ _activeWriters++;
+
return true;
}
+
+ internal void CloseAppender()
+ {
+ _activeWriters--;
+ }
+
+ internal bool CanDelete()
+ {
+ return _activeReaders == 0 && _activeWriters == 0;
+ }
}
diff --git a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileAppender.cs b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileAppender.cs
index 9b078eec..a456e91c 100644
--- a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileAppender.cs
+++ b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileAppender.cs
@@ -35,5 +35,6 @@ public override void Sync()
public override void Dispose()
{
+ _storeFile.CloseAppender();
}
}
diff --git a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileReader.cs b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileReader.cs
index d8507e6c..06a83a1e 100644
--- a/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileReader.cs
+++ b/test/SeqCli.Tests/Forwarder/Filesystem/InMemoryStoreFileReader.cs
@@ -25,5 +25,6 @@ public override long CopyTo(Span<byte> buffer, long from = 0, long? length = null)
public override void Dispose()
{
+ _storeFile.CloseReader();
}
}
diff --git a/test/SeqCli.Tests/Forwarder/Filesystem/TestFilesystem.cs b/test/SeqCli.Tests/Forwarder/Filesystem/TestFilesystem.cs
new file mode 100644
index 00000000..26c13977
--- /dev/null
+++ b/test/SeqCli.Tests/Forwarder/Filesystem/TestFilesystem.cs
@@ -0,0 +1,37 @@
+using System;
+using System.IO;
+using SeqCli.Forwarder.Filesystem;
+using SeqCli.Forwarder.Filesystem.System;
+
+namespace SeqCli.Tests.Forwarder.Filesystem;
+
+static class TestFilesystem
+{
+ public static void PermuteFilesystem(Action<StoreDirectory> withDirectory)
+ {
+ try
+ {
+ withDirectory(new InMemoryStoreDirectory());
+ }
+ catch (Exception e)
+ {
+ throw new Exception("In-memory permutation failed", e);
+ }
+
+ var tmpDir = Path.Combine(Path.GetTempPath(), $"SeqCliTest_{Guid.NewGuid():X}");
+ Directory.CreateDirectory(tmpDir);
+
+ try
+ {
+ withDirectory(new SystemStoreDirectory(tmpDir));
+ }
+ catch (Exception e)
+ {
+ throw new Exception("Filesystem permutation failed", e);
+ }
+ finally
+ {
+ Directory.Delete(tmpDir, true);
+ }
+ }
+}
diff --git a/test/SeqCli.Tests/Forwarder/Storage/BookmarkTests.cs b/test/SeqCli.Tests/Forwarder/Storage/BookmarkTests.cs
index 2e94f982..9882dfb7 100644
--- a/test/SeqCli.Tests/Forwarder/Storage/BookmarkTests.cs
+++ b/test/SeqCli.Tests/Forwarder/Storage/BookmarkTests.cs
@@ -10,74 +10,78 @@ public class BookmarkTests
[Fact]
public void CreateSetGet()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ var bookmark = Bookmark.Open(directory);
- var bookmark = Bookmark.Open(directory);
+ Assert.False(bookmark.TryGet(out var value));
+ Assert.Null(value);
- Assert.False(bookmark.TryGet(out var value));
- Assert.Null(value);
+ Assert.True(bookmark.TrySet(new BufferPosition(42, 1)));
+ Assert.True(bookmark.TryGet(out value));
+ Assert.Equal(new BufferPosition(42, 1), value.Value);
- Assert.True(bookmark.TrySet(new BufferPosition(42, 1)));
- Assert.True(bookmark.TryGet(out value));
- Assert.Equal(new BufferPosition(42, 1), value.Value);
-
- Assert.True(bookmark.TrySet(new BufferPosition(42, int.MaxValue)));
- Assert.True(bookmark.TryGet(out value));
- Assert.Equal(new BufferPosition(42, int.MaxValue), value.Value);
+ Assert.True(bookmark.TrySet(new BufferPosition(42, int.MaxValue)));
+ Assert.True(bookmark.TryGet(out value));
+ Assert.Equal(new BufferPosition(42, int.MaxValue), value.Value);
+ });
}
[Fact]
public void OpenDeletesOldBookmarks()
{
- var directory = new InMemoryStoreDirectory();
-
- directory.Create($"{1L:x16}.bookmark", new BufferPosition(3, 3478).Encode());
- directory.Create($"{3L:x16}.bookmark", new BufferPosition(42, 17).Encode());
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create($"{1L:x16}.bookmark", new BufferPosition(3, 3478).Encode());
+ directory.Create($"{3L:x16}.bookmark", new BufferPosition(42, 17).Encode());
- Assert.Equal(2, directory.Files.Count);
+ Assert.Equal(2, directory.ListAll().Count());
- var bookmark = Bookmark.Open(directory);
+ var bookmark = Bookmark.Open(directory);
- Assert.Equal($"{3L:x16}.bookmark", directory.Files.Single().Key);
+ Assert.Equal($"{3L:x16}.bookmark", directory.ListAll().Single().Name);
- Assert.True(bookmark.TryGet(out var value));
- Assert.Equal(new BufferPosition(42, 17), value);
+ Assert.True(bookmark.TryGet(out var value));
+ Assert.Equal(new BufferPosition(42, 17), value);
+ });
}
[Fact]
public void OpenDeletesCorruptedBookmarks()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create($"{1L:x16}.bookmark", new BufferPosition(3, 3478).Encode());
- directory.Create($"{1L:x16}.bookmark", new BufferPosition(3, 3478).Encode());
+ // This bookmark is invalid
+ directory.Create($"{3L:x16}.bookmark", new byte[] { 1, 2, 3 });
- // This bookmark is invalid
- directory.Create($"{3L:x16}.bookmark", new byte[] { 1, 2, 3 });
+ var bookmark = Bookmark.Open(directory);
- var bookmark = Bookmark.Open(directory);
+ Assert.Empty(directory.ListAll());
- Assert.Empty(directory.Files);
+ Assert.True(bookmark.TrySet(new BufferPosition(42, 17)));
- Assert.True(bookmark.TrySet(new BufferPosition(42, 17)));
-
- Assert.Equal($"{4L:x16}.bookmark", directory.Files.Single().Key);
+ Assert.Equal($"{4L:x16}.bookmark", directory.ListAll().Single().Name);
+ });
}
[Fact]
public void OpenDeletesMisnamedBookmarks()
{
- var directory = new InMemoryStoreDirectory();
-
- directory.Create($"{1L:x16}.bookmark", new BufferPosition(3, 3478).Encode());
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create($"{1L:x16}.bookmark", new BufferPosition(3, 3478).Encode());
- // This bookmark is invalid
- directory.Create($"ff{3L:x16}.bookmark", new BufferPosition(42, 17).Encode());
+ // This bookmark is invalid
+ directory.Create($"ff{3L:x16}.bookmark", new BufferPosition(42, 17).Encode());
- var bookmark = Bookmark.Open(directory);
+ var bookmark = Bookmark.Open(directory);
- Assert.Single(directory.Files);
+ Assert.Single(directory.ListAll());
- Assert.True(bookmark.TryGet(out var value));
- Assert.Equal(new BufferPosition(3, 3478), value);
+ Assert.True(bookmark.TryGet(out var value));
+ Assert.Equal(new BufferPosition(3, 3478), value);
+ });
}
}
diff --git a/test/SeqCli.Tests/Forwarder/Storage/BufferTests.cs b/test/SeqCli.Tests/Forwarder/Storage/BufferTests.cs
index 9b91660b..60dee141 100644
--- a/test/SeqCli.Tests/Forwarder/Storage/BufferTests.cs
+++ b/test/SeqCli.Tests/Forwarder/Storage/BufferTests.cs
@@ -1,4 +1,5 @@
using System.Linq;
+using SeqCli.Forwarder.Filesystem.System;
using SeqCli.Forwarder.Storage;
using SeqCli.Tests.Forwarder.Filesystem;
using Xunit;
@@ -10,306 +11,343 @@ public class BufferTests
[Fact]
public void OpenAppendRead()
{
- var directory = new InMemoryStoreDirectory();
-
- using var writer = BufferAppender.Open(directory);
- var reader = BufferReader.Open(directory);
-
- Assert.Empty(directory.Files);
-
- // Append a payload
- Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), long.MaxValue));
- Assert.Single(directory.Files);
-
- // Read the payload
- Assert.False(reader.TryFillBatch(10, out _));
- Assert.True(reader.TryFillBatch(10, out var batch));
- var batchBuffer = batch.Value;
- Assert.Equal(new BufferPosition(1, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":1}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
-
- // Advance the reader
- reader.AdvanceTo(batchBuffer.ReaderHead);
- Assert.False(reader.TryFillBatch(10, out batch));
-
- // Append another payload
- Assert.True(writer.TryAppend("{\"id\":2}\n"u8.ToArray(), long.MaxValue));
- Assert.Single(directory.Files);
-
- // Read the payload
- Assert.True(reader.TryFillBatch(10, out batch));
- batchBuffer = batch.Value;
- Assert.Equal(new BufferPosition(1, 18), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
-
- // Advance the reader
- reader.AdvanceTo(batchBuffer.ReaderHead);
- Assert.False(reader.TryFillBatch(10, out batch));
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ using var writer = BufferAppender.Open(directory);
+ var reader = BufferReader.Open(directory);
+
+ Assert.Empty(directory.ListAll());
+
+ // Append a payload
+ Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), long.MaxValue));
+ Assert.Single(directory.ListAll());
+
+ // Read the payload
+ Assert.False(reader.TryFillBatch(10, out _));
+ Assert.True(reader.TryFillBatch(10, out var batch));
+ var batchBuffer = batch.Value;
+ Assert.Equal(new BufferPosition(1, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":1}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+
+ // Advance the reader
+ reader.AdvanceTo(batchBuffer.ReaderHead);
+ Assert.False(reader.TryFillBatch(10, out batch));
+
+ // Append another payload
+ Assert.True(writer.TryAppend("{\"id\":2}\n"u8.ToArray(), long.MaxValue));
+ Assert.Single(directory.ListAll());
+
+ // Read the payload
+ Assert.True(reader.TryFillBatch(10, out batch));
+ batchBuffer = batch.Value;
+ Assert.Equal(new BufferPosition(1, 18), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+
+ // Advance the reader
+ reader.AdvanceTo(batchBuffer.ReaderHead);
+ Assert.False(reader.TryFillBatch(10, out batch));
+ });
}
[Fact]
public void ReadWaitsUntilCompleteDataOnLastChunk()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ var chunk = directory.Create(new ChunkName(1).ToString(), "{\"id\":1"u8.ToArray());
- var chunk = directory.Create(new ChunkName(1).ToString(), "{\"id\":1"u8.ToArray());
+ var reader = BufferReader.Open(directory);
- var reader = BufferReader.Open(directory);
+ Assert.False(reader.TryFillBatch(512, out _));
- Assert.False(reader.TryFillBatch(512, out _));
+ chunk.Append("}"u8.ToArray());
- chunk.Append("}"u8.ToArray());
+ Assert.False(reader.TryFillBatch(512, out _));
- Assert.False(reader.TryFillBatch(512, out _));
+ chunk.Append("\n"u8.ToArray());
- chunk.Append("\n"u8.ToArray());
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
-
- Assert.Equal(new BufferPosition(1, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":1}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.Equal(new BufferPosition(1, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":1}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ });
}
[Fact]
public void ReadDiscardsPreviouslyReadChunks()
{
- var directory = new InMemoryStoreDirectory();
-
- directory.Create(new ChunkName(1).ToString(), "{\"id\":1}\n"u8.ToArray());
- directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(1).ToString(), "{\"id\":1}\n"u8.ToArray());
+ directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
- var reader = BufferReader.Open(directory);
+ var reader = BufferReader.Open(directory);
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
- Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":1}\n{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
+ Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":1}\n{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
- Assert.Equal(2, directory.Files.Count);
+ Assert.Equal(2, directory.ListAll().Count());
- reader.AdvanceTo(batchBuffer.ReaderHead);
+ reader.AdvanceTo(batchBuffer.ReaderHead);
- Assert.Single(directory.Files);
+ Assert.Single(directory.ListAll());
- directory.Create(new ChunkName(1).ToString(), "{\"id\":1}\n"u8.ToArray());
- directory.Create(new ChunkName(3).ToString(), "{\"id\":3}\n"u8.ToArray());
+ directory.Create(new ChunkName(1).ToString(), "{\"id\":1}\n"u8.ToArray());
+ directory.Create(new ChunkName(3).ToString(), "{\"id\":3}\n"u8.ToArray());
- Assert.Equal(3, directory.Files.Count);
+ Assert.Equal(3, directory.ListAll().Count());
- Assert.False(reader.TryFillBatch(512, out _));
- Assert.True(reader.TryFillBatch(512, out batch));
- batchBuffer = batch.Value;
- Assert.Equal(new BufferPosition(3, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":3}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.False(reader.TryFillBatch(512, out _));
+ Assert.True(reader.TryFillBatch(512, out batch));
+ batchBuffer = batch.Value;
+ Assert.Equal(new BufferPosition(3, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":3}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
- reader.AdvanceTo(batchBuffer.ReaderHead);
+ reader.AdvanceTo(batchBuffer.ReaderHead);
- Assert.Single(directory.Files);
+ Assert.Single(directory.ListAll());
+ });
}
[Fact]
public void AdvancingToNonexistentLowerPositionRereadsAllChunks()
{
- var directory = new InMemoryStoreDirectory();
-
- directory.Create(new ChunkName(71).ToString(), "{\"id\":1}\n"u8.ToArray());
- directory.Create(new ChunkName(72).ToString(), "{\"id\":2}\n"u8.ToArray());
-
- var reader = BufferReader.Open(directory);
-
- reader.AdvanceTo(new BufferPosition(60, 0));
-
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
- Assert.Equal(new BufferPosition(72, 9), batchBuffer.ReaderHead);
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(71).ToString(), "{\"id\":1}\n"u8.ToArray());
+ directory.Create(new ChunkName(72).ToString(), "{\"id\":2}\n"u8.ToArray());
+
+ var reader = BufferReader.Open(directory);
+
+ reader.AdvanceTo(new BufferPosition(60, 0));
+
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
+ Assert.Equal(new BufferPosition(72, 9), batchBuffer.ReaderHead);
+ });
}
[Fact]
public void AdvancingToNonexistentHigherPositionDiscardsAllChunks()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(71).ToString(), "{\"id\":1}\n"u8.ToArray());
+ directory.Create(new ChunkName(72).ToString(), "{\"id\":2}\n"u8.ToArray());
+
+ var reader = BufferReader.Open(directory);
- directory.Create(new ChunkName(71).ToString(), "{\"id\":1}\n"u8.ToArray());
- directory.Create(new ChunkName(72).ToString(), "{\"id\":2}\n"u8.ToArray());
+ reader.AdvanceTo(new BufferPosition(80, 0));
- var reader = BufferReader.Open(directory);
-
- reader.AdvanceTo(new BufferPosition(80, 0));
-
- Assert.False(reader.TryFillBatch(512, out _));
+ Assert.False(reader.TryFillBatch(512, out _));
+ });
}
[Fact]
public void ReadDiscardsOversizePayloads()
{
- var directory = new InMemoryStoreDirectory();
-
- using var writer = BufferAppender.Open(directory);
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ using var writer = BufferAppender.Open(directory);
- Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), long.MaxValue));
+ Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), long.MaxValue));
- var reader = BufferReader.Open(directory);
+ var reader = BufferReader.Open(directory);
- Assert.False(reader.TryFillBatch(5, out _));
- Assert.False(reader.TryFillBatch(512, out _));
+ Assert.False(reader.TryFillBatch(5, out _));
+ Assert.False(reader.TryFillBatch(512, out _));
+ });
}
[Fact]
public void ReadDoesNotDiscardAcrossFiles()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(1).ToString(), "{\"id\":1}"u8.ToArray());
+ directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
- directory.Create(new ChunkName(1).ToString(), "{\"id\":1}"u8.ToArray());
- directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
+ var reader = BufferReader.Open(directory);
- var reader = BufferReader.Open(directory);
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
-
- Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ });
}
[Fact]
public void ReadStopsDiscardingOnExternalDelete()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(1).ToString(), "{\"id\":1}"u8.ToArray());
+
+ var reader = BufferReader.Open(directory);
+
+ Assert.False(reader.TryFillBatch(5, out _));
+
+ // Deleting the file here will cause our discarding chunk to change
+ Assert.True(directory.TryDelete(new ChunkName(1).ToString()));
+ directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
- directory.Create(new ChunkName(1).ToString(), "{\"id\":1}"u8.ToArray());
+ Assert.False(reader.TryFillBatch(512, out _));
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
+
+ Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ });
+ }
+
+ [Fact]
+ public void ReadStopsDiscardingOnExternalTruncate()
+ {
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(1).ToString(), "{\"id\":1}"u8.ToArray());
- var reader = BufferReader.Open(directory);
+ var reader = BufferReader.Open(directory);
- Assert.False(reader.TryFillBatch(5, out _));
+ Assert.False(reader.TryFillBatch(5, out _));
- // Deleting the file here will cause our discarding chunk to change
- Assert.True(directory.TryDelete(new ChunkName(1).ToString()));
- directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
+ // Changing the length of the file here will cause our discarding chunk to change
+ directory.ReplaceContents(new ChunkName(1).ToString(), "{}"u8.ToArray());
+ directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
- Assert.False(reader.TryFillBatch(512, out _));
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
+ Assert.False(reader.TryFillBatch(512, out _));
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
- Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ });
}
[Fact]
public void ReadStopsDiscardingOnExternalCreate()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(1).ToString(), "{\"id\":1}"u8.ToArray());
- directory.Create(new ChunkName(1).ToString(), "{\"id\":1}"u8.ToArray());
+ var reader = BufferReader.Open(directory);
- var reader = BufferReader.Open(directory);
+ Assert.False(reader.TryFillBatch(5, out _));
- Assert.False(reader.TryFillBatch(5, out _));
+ // Creating a new file here will stop discarding
+ directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
- // Creating a new file here will cause a new one to be created
- directory.Create(new ChunkName(2).ToString(), "{\"id\":2}\n"u8.ToArray());
+ Assert.False(reader.TryFillBatch(512, out _));
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
- Assert.False(reader.TryFillBatch(512, out _));
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
-
- Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":2}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ });
}
[Fact]
public void AppendRolloverOnWrite()
{
- var directory = new InMemoryStoreDirectory();
-
- using var writer = BufferAppender.Open(directory);
- var reader = BufferReader.Open(directory);
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ using var writer = BufferAppender.Open(directory);
+ var reader = BufferReader.Open(directory);
- Assert.Empty(directory.Files);
+ Assert.Empty(directory.ListAll());
- Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), 17));
- Assert.True(writer.TryAppend("{\"id\":2}\n"u8.ToArray(), 17));
+ Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), 17));
+ Assert.True(writer.TryAppend("{\"id\":2}\n"u8.ToArray(), 17));
- Assert.Single(directory.Files);
+ Assert.Single(directory.ListAll());
- Assert.True(writer.TryAppend("{\"id\":3}\n"u8.ToArray(), 17));
+ Assert.True(writer.TryAppend("{\"id\":3}\n"u8.ToArray(), 17));
- Assert.Equal(2, directory.Files.Count);
+ Assert.Equal(2, directory.ListAll().Count());
- Assert.False(reader.TryFillBatch(512, out _));
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
+ Assert.False(reader.TryFillBatch(512, out _));
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
- Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
- Assert.Equal("{\"id\":1}\n{\"id\":2}\n{\"id\":3}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.Equal(new BufferPosition(2, 9), batchBuffer.ReaderHead);
+ Assert.Equal("{\"id\":1}\n{\"id\":2}\n{\"id\":3}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
- reader.AdvanceTo(batchBuffer.ReaderHead);
+ reader.AdvanceTo(batchBuffer.ReaderHead);
- Assert.Single(directory.Files);
+ Assert.Single(directory.ListAll());
+ });
}
[Fact]
public void ExistingFilesAreReadonly()
{
- var directory = new InMemoryStoreDirectory();
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(0).ToString());
- directory.Create(new ChunkName(0).ToString());
+ using var writer = BufferAppender.Open(directory);
+ var reader = BufferReader.Open(directory);
- using var writer = BufferAppender.Open(directory);
- var reader = BufferReader.Open(directory);
+ Assert.Single(directory.ListAll());
- Assert.Single(directory.Files);
+ Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), long.MaxValue));
- Assert.True(writer.TryAppend("{\"id\":1}\n"u8.ToArray(), long.MaxValue));
+ Assert.Equal(2, directory.ListAll().Count());
- Assert.Equal(2, directory.Files.Count);
+ Assert.False(reader.TryFillBatch(512, out _));
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
- Assert.False(reader.TryFillBatch(512, out _));
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
-
- Assert.Equal(new BufferPosition(1, 9), batchBuffer.ReaderHead);
+ Assert.Equal(new BufferPosition(1, 9), batchBuffer.ReaderHead);
+ });
}
[Fact]
public void OpenReadAcrossChunks()
{
- var directory = new InMemoryStoreDirectory();
-
- directory.Create(new ChunkName(0).ToString(), "{\"id\":1}\n"u8.ToArray());
- directory.Create(new ChunkName(1).ToString(), "{\"id\":2}\n"u8.ToArray());
- directory.Create(new ChunkName(2).ToString(), "{\"id\":3}\n"u8.ToArray());
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ directory.Create(new ChunkName(0).ToString(), "{\"id\":1}\n"u8.ToArray());
+ directory.Create(new ChunkName(1).ToString(), "{\"id\":2}\n"u8.ToArray());
+ directory.Create(new ChunkName(2).ToString(), "{\"id\":3}\n"u8.ToArray());
- var reader = BufferReader.Open(directory);
+ var reader = BufferReader.Open(directory);
- Assert.Equal(3, directory.Files.Count);
+ Assert.Equal(3, directory.ListAll().Count());
- Assert.True(reader.TryFillBatch(512, out var batch));
- var batchBuffer = batch.Value;
+ Assert.True(reader.TryFillBatch(512, out var batch));
+ var batchBuffer = batch.Value;
- Assert.Equal("{\"id\":1}\n{\"id\":2}\n{\"id\":3}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
+ Assert.Equal("{\"id\":1}\n{\"id\":2}\n{\"id\":3}\n"u8.ToArray(), batchBuffer.AsSpan().ToArray());
- reader.AdvanceTo(batchBuffer.ReaderHead);
+ reader.AdvanceTo(batchBuffer.ReaderHead);
- Assert.Single(directory.Files);
+ Assert.Single(directory.ListAll());
+ });
}
[Fact]
public void MaxChunksOnAppender()
{
- var directory = new InMemoryStoreDirectory();
-
- using var appender = BufferAppender.Open(directory);
+ TestFilesystem.PermuteFilesystem(directory =>
+ {
+ using var appender = BufferAppender.Open(directory);
- for (var i = 0; i < 10; i++) Assert.True(appender.TryAppend("{\"id\":1}\n"u8.ToArray(), 5, 3));
+ for (var i = 0; i < 10; i++) Assert.True(appender.TryAppend("{\"id\":1}\n"u8.ToArray(), 5, 3));
- var files = directory.Files.Select(f => f.Key).ToList();
- files.Sort();
+ var files = directory.ListAll().Select(f => f.Name).ToList();
+ files.Sort();
- Assert.Equal([
- "0000000000000008.clef",
- "0000000000000009.clef",
- "000000000000000a.clef"
- ], files);
+ Assert.Equal([
+ "0000000000000008.clef",
+ "0000000000000009.clef",
+ "000000000000000a.clef"
+ ], files);
+ });
}
}