diff --git a/src/HotChocolate/Fusion/benchmarks/Fusion.Execution.Benchmarks/DbRowWriteBenchmark.cs b/src/HotChocolate/Fusion/benchmarks/Fusion.Execution.Benchmarks/DbRowWriteBenchmark.cs
new file mode 100644
index 00000000000..c380cadd8fd
--- /dev/null
+++ b/src/HotChocolate/Fusion/benchmarks/Fusion.Execution.Benchmarks/DbRowWriteBenchmark.cs
@@ -0,0 +1,387 @@
+using System;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+using System.Runtime.Intrinsics;
+using BenchmarkDotNet.Attributes;
+
+namespace Fusion.Execution.Benchmarks;
+
+///
+/// Compares row-write strategies for each MetaDb append pattern.
+/// All benchmarks write rows of 20 bytes each to a
+/// pre-allocated buffer; we measure the per-row write cost only.
+///
+/// Patterns (mapped to real MetaDb methods):
+/// - AppendNull: 1 variable int + 4 zero ints
+/// - AppendEmptyProperty: 2 variable ints + 3 zero ints
+/// - AppendStartObject: 4 variable ints + 1 zero int
+///
+internal static class DbRowBenchData
+{
+ public const int RowSize = 20;
+ public const int Rows = 4096;
+
+ public static byte[] Buffer { get; } = new byte[Rows * RowSize];
+ public static int[] Parents { get; } = new int[Rows];
+ public static int[] SelectionIds { get; } = new int[Rows];
+ public static int[] PropertyCounts { get; } = new int[Rows];
+ public static int[] Flags { get; } = new int[Rows];
+
+ static DbRowBenchData()
+ {
+ var rng = new Random(42);
+ for (var i = 0; i < Rows; i++)
+ {
+ Parents[i] = rng.Next(0, 0x0FFFFFFF);
+ SelectionIds[i] = rng.Next(0, 0x7FFF);
+ PropertyCounts[i] = rng.Next(0, 32);
+ Flags[i] = rng.Next(0, 63);
+ }
+ }
+
+ // ---------------- AppendNull ----------------
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteNull_FiveScalar(ref byte row, int parent)
+ {
+ Unsafe.WriteUnaligned(ref row, parent << 4);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 4), 0);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 8), 0);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 12), 0);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteNull_ScalarPlusInitBlock(ref byte row, int parent)
+ {
+ Unsafe.WriteUnaligned(ref row, parent << 4);
+ Unsafe.InitBlockUnaligned(ref Unsafe.Add(ref row, 4), 0, 16);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteNull_Vec128PlusScalar(ref byte row, int parent)
+ {
+ // 16-byte zero-ish vector with int0 set, then trailing int zero.
+ var v = Vector128.Create(parent << 4, 0, 0, 0).AsByte();
+ v.StoreUnsafe(ref row);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteNull_ScalarPlusVec128Zero(ref byte row, int parent)
+ {
+ // Scalar write for int0, then a 16-byte Vector128 zero covering ints 1..4.
+ Unsafe.WriteUnaligned(ref row, parent << 4);
+ Vector128.Zero.StoreUnsafe(ref Unsafe.Add(ref row, 4));
+ }
+
+ // ---------------- AppendEmptyProperty ----------------
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteEmptyProp_FiveScalar(ref byte row, int parent, int selectionId, int flags)
+ {
+ Unsafe.WriteUnaligned(ref row, 3 /*PropertyName*/ | (parent << 4));
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 4),
+ selectionId | (2 << 15) | (flags << 17));
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 8), 0);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 12), 0);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteEmptyProp_TwoScalarPlusInitBlock(ref byte row, int parent, int selectionId, int flags)
+ {
+ Unsafe.WriteUnaligned(ref row, 3 | (parent << 4));
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 4),
+ selectionId | (2 << 15) | (flags << 17));
+ Unsafe.InitBlockUnaligned(ref Unsafe.Add(ref row, 8), 0, 12);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteEmptyProp_Vec128PlusScalar(ref byte row, int parent, int selectionId, int flags)
+ {
+ var int0 = 3 | (parent << 4);
+ var int1 = selectionId | (2 << 15) | (flags << 17);
+
+ var v = Vector128.Create(int0, int1, 0, 0).AsByte();
+ v.StoreUnsafe(ref row);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteEmptyProp_TwoScalarPlusVec128Zero(ref byte row, int parent, int selectionId, int flags)
+ {
+ Unsafe.WriteUnaligned(ref row, 3 | (parent << 4));
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 4),
+ selectionId | (2 << 15) | (flags << 17));
+        // A Vector128.Zero store at offset 8 would write 16 bytes, but only 12 bytes
+        // (ints 2..4) remain in this 20-byte row, so it would spill 4 bytes into the
+        // next row. That is only safe when the caller guarantees room after the row,
+        // so this variant deliberately falls back to three scalar zero writes.
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 8), 0);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 12), 0);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+ }
+
+ // ---------------- AppendStartObject ----------------
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteStartObj_FiveScalar(ref byte row, int parent, int selectionId, int propertyCount, int flags)
+ {
+ Unsafe.WriteUnaligned(ref row, 1 | (parent << 4));
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 4),
+ selectionId | (1 << 15) | (flags << 17));
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 8), propertyCount);
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 12),
+ ((propertyCount * 2) + 1) & 0x07FFFFFF);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteStartObj_Struct(ref byte row, int parent, int selectionId, int propertyCount, int flags)
+ {
+ var dbRow = new DbRowLocal(
+ tokenType: 1,
+ sizeOrLength: propertyCount,
+ parentRow: parent,
+ operationReferenceId: selectionId,
+ operationReferenceType: 1,
+ numberOfRows: (propertyCount * 2) + 1,
+ flags: flags);
+ Unsafe.WriteUnaligned(ref row, dbRow);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static void WriteStartObj_Vec128PlusScalar(ref byte row, int parent, int selectionId, int propertyCount, int flags)
+ {
+ var int0 = 1 | (parent << 4);
+ var int1 = selectionId | (1 << 15) | (flags << 17);
+ var int2 = propertyCount;
+ var int3 = ((propertyCount * 2) + 1) & 0x07FFFFFF;
+
+ var v = Vector128.Create(int0, int1, int2, int3).AsByte();
+ v.StoreUnsafe(ref row);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+ }
+
+ [StructLayout(LayoutKind.Sequential)]
+ public readonly struct DbRowLocal
+ {
+ private readonly int _typeAndParent;
+ private readonly int _selectionAndFlags;
+ private readonly int _sizeOrLengthUnion;
+ private readonly int _locationOrRows;
+ private readonly int _source;
+
+ public DbRowLocal(
+ int tokenType,
+ int location = 0,
+ int sizeOrLength = 0,
+ int sourceDocumentId = 0,
+ int parentRow = 0,
+ int operationReferenceId = 0,
+ int operationReferenceType = 0,
+ int numberOfRows = 0,
+ int flags = 0)
+ {
+ var locationOrRows = location != 0 ? location : numberOfRows;
+ _typeAndParent = (tokenType & 0x0F) | (parentRow << 4);
+ _selectionAndFlags = operationReferenceId
+ | (operationReferenceType << 15)
+ | (flags << 17);
+ _sizeOrLengthUnion = sizeOrLength;
+ _locationOrRows = locationOrRows & 0x07FFFFFF;
+ _source = sourceDocumentId & 0x7FFF;
+ }
+ }
+}
+
+[MemoryDiagnoser]
+[InProcess]
+public class AppendNullWriteBenchmark
+{
+ [Benchmark(Baseline = true)]
+ public void FiveScalar()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteNull_FiveScalar(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i));
+ }
+ }
+
+ [Benchmark]
+ public void ScalarPlusInitBlock()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteNull_ScalarPlusInitBlock(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i));
+ }
+ }
+
+ [Benchmark]
+ public void Vec128PlusScalar()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteNull_Vec128PlusScalar(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i));
+ }
+ }
+
+ [Benchmark]
+ public void ScalarPlusVec128Zero()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteNull_ScalarPlusVec128Zero(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i));
+ }
+ }
+}
+
+[MemoryDiagnoser]
+[InProcess]
+public class AppendEmptyPropertyWriteBenchmark
+{
+ [Benchmark(Baseline = true)]
+ public void FiveScalar()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+ ref var selIds = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.SelectionIds);
+ ref var flags = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Flags);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteEmptyProp_FiveScalar(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i),
+ Unsafe.Add(ref selIds, i),
+ Unsafe.Add(ref flags, i));
+ }
+ }
+
+ [Benchmark]
+ public void TwoScalarPlusInitBlock()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+ ref var selIds = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.SelectionIds);
+ ref var flags = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Flags);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteEmptyProp_TwoScalarPlusInitBlock(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i),
+ Unsafe.Add(ref selIds, i),
+ Unsafe.Add(ref flags, i));
+ }
+ }
+
+ [Benchmark]
+ public void Vec128PlusScalar()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+ ref var selIds = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.SelectionIds);
+ ref var flags = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Flags);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteEmptyProp_Vec128PlusScalar(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i),
+ Unsafe.Add(ref selIds, i),
+ Unsafe.Add(ref flags, i));
+ }
+ }
+}
+
+[MemoryDiagnoser]
+[InProcess]
+public class AppendStartObjectWriteBenchmark
+{
+ [Benchmark(Baseline = true)]
+ public void FiveScalar()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+ ref var selIds = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.SelectionIds);
+ ref var counts = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.PropertyCounts);
+ ref var flags = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Flags);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteStartObj_FiveScalar(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i),
+ Unsafe.Add(ref selIds, i),
+ Unsafe.Add(ref counts, i),
+ Unsafe.Add(ref flags, i));
+ }
+ }
+
+ [Benchmark]
+ public void DbRowStruct()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+ ref var selIds = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.SelectionIds);
+ ref var counts = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.PropertyCounts);
+ ref var flags = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Flags);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteStartObj_Struct(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i),
+ Unsafe.Add(ref selIds, i),
+ Unsafe.Add(ref counts, i),
+ Unsafe.Add(ref flags, i));
+ }
+ }
+
+ [Benchmark]
+ public void Vec128PlusScalar()
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Buffer);
+ ref var parents = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Parents);
+ ref var selIds = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.SelectionIds);
+ ref var counts = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.PropertyCounts);
+ ref var flags = ref MemoryMarshal.GetArrayDataReference(DbRowBenchData.Flags);
+
+ for (var i = 0; i < DbRowBenchData.Rows; i++)
+ {
+ DbRowBenchData.WriteStartObj_Vec128PlusScalar(
+ ref Unsafe.Add(ref dest, i * DbRowBenchData.RowSize),
+ Unsafe.Add(ref parents, i),
+ Unsafe.Add(ref selIds, i),
+ Unsafe.Add(ref counts, i),
+ Unsafe.Add(ref flags, i));
+ }
+ }
+}
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.DbRow.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.DbRow.cs
index 0a84bde497f..bbfd2b5a79d 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.DbRow.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.DbRow.cs
@@ -12,24 +12,31 @@ internal readonly struct DbRow
public const int Size = 20;
public const int UnknownSize = -1;
- // 27 bits for location + 2 bits OpRefType + 3 reserved bits
- private readonly int _locationAndOpRefType;
+ // Byte offsets used by MetaDb's direct-read fast paths.
+ internal const int TypeAndParentOffset = 0;
+ internal const int SelectionAndFlagsOffset = 4;
+ internal const int SizeOffset = 8;
+ internal const int LocationOrRowsOffset = 12;
+ internal const int SourceOffset = 16;
- // A Sign bit for HasComplexChildren + 31 bits for size/length
- private readonly int _sizeOrLengthUnion;
+ // 4 bits TokenType + 28 bits ParentRow
+ private readonly int _typeAndParent;
+
+ // 15 bits OperationReferenceId + 2 bits OperationReferenceType + 6 bits Flags + 9 reserved
+ private readonly int _selectionAndFlags;
- // 4 bits TokenType + 27 bits NumberOfRows + 1 reserved bit
- private readonly int _numberOfRowsTypeAndReserved;
+ // 1 bit HasComplexChildren (sign) + 31 bits SizeOrLength
+ private readonly int _sizeOrLengthUnion;
- // 15 bits SourceDocumentId + 17 bits (high 17 bits of ParentRow)
- private readonly int _sourceAndParentHigh;
+ // 27 bits — either Location or NumberOfRows, depending on TokenType/Flags
+ private readonly int _locationOrRows;
- // 15 bits OperationReferenceId + 6 bits Flags + 11 bits (low bits of ParentRow)
- private readonly int _selectionSetFlagsAndParentLow;
+ // 15 bits SourceDocumentId + 17 reserved
+ private readonly int _source;
public DbRow(
ElementTokenType tokenType,
- int location,
+ int location = 0,
int sizeOrLength = 0,
int sourceDocumentId = 0,
int parentRow = 0,
@@ -49,20 +56,24 @@ public DbRow(
Debug.Assert((byte)operationReferenceType <= 3); // 2 bits
Debug.Assert(Unsafe.SizeOf() == Size);
- _locationAndOpRefType = location | ((int)operationReferenceType << 27);
+ var locationOrRows = location != 0 ? location : numberOfRows;
+
+ _typeAndParent = ((int)tokenType & 0x0F) | (parentRow << 4);
+ _selectionAndFlags = operationReferenceId
+ | ((int)operationReferenceType << 15)
+ | ((int)flags << 17);
_sizeOrLengthUnion = sizeOrLength;
- _numberOfRowsTypeAndReserved = ((int)tokenType << 28) | (numberOfRows & 0x07FFFFFF);
- _sourceAndParentHigh = sourceDocumentId | ((parentRow >> 11) << 15);
- _selectionSetFlagsAndParentLow = operationReferenceId | ((int)flags << 15) | ((parentRow & 0x7FF) << 21);
+ _locationOrRows = locationOrRows & 0x07FFFFFF;
+ _source = sourceDocumentId & 0x7FFF;
}
///
/// Element token type (includes Reference for composition).
///
///
- /// 4 bits = possible values
+ /// 4 bits = 16 possible values
///
- public ElementTokenType TokenType => (ElementTokenType)(unchecked((uint)_numberOfRowsTypeAndReserved) >> 28);
+ public ElementTokenType TokenType => (ElementTokenType)(_typeAndParent & 0x0F);
///
/// Operation reference type indicating the type of GraphQL operation element.
@@ -71,15 +82,15 @@ public DbRow(
/// 2 bits = 4 possible values
///
public OperationReferenceType OperationReferenceType
- => (OperationReferenceType)((_locationAndOpRefType >> 27) & 0x03);
+ => (OperationReferenceType)((_selectionAndFlags >> 15) & 0x03);
///
- /// Byte offset in source data OR metaDb row index for references
+ /// Byte offset in source data or metaDb row index for references.
///
///
/// 27 bits = 134M limit
///
- public int Location => _locationAndOpRefType & 0x07FFFFFF;
+ public int Location => _locationOrRows & 0x07FFFFFF;
///
/// Length of data in JSON payload, number of elements if array or number of properties in an object.
@@ -95,7 +106,7 @@ public OperationReferenceType OperationReferenceType
public bool HasComplexChildren => _sizeOrLengthUnion < 0;
///
- /// Specifies if a size for the item has ben set.
+ /// Specifies if a size for the item has not been set.
///
public bool IsUnknownSize => _sizeOrLengthUnion == UnknownSize;
@@ -105,7 +116,7 @@ public OperationReferenceType OperationReferenceType
///
/// 27 bits = 134M rows
///
- public int NumberOfRows => _numberOfRowsTypeAndReserved & 0x07FFFFFF;
+ public int NumberOfRows => _locationOrRows & 0x07FFFFFF;
///
/// Which source JSON document contains the data.
@@ -113,7 +124,7 @@ public OperationReferenceType OperationReferenceType
///
/// 15 bits = 32K documents
///
- public int SourceDocumentId => _sourceAndParentHigh & 0x7FFF;
+ public int SourceDocumentId => _source & 0x7FFF;
///
/// Index of parent element in metadb for navigation and null propagation.
@@ -121,8 +132,7 @@ public OperationReferenceType OperationReferenceType
///
/// 28 bits = 268M rows
///
- public int ParentRow
- => ((int)((uint)_sourceAndParentHigh >> 15) << 11) | ((_selectionSetFlagsAndParentLow >> 21) & 0x7FF);
+ public int ParentRow => (int)((uint)_typeAndParent >> 4);
///
/// Reference to GraphQL selection set or selection metadata.
@@ -130,7 +140,7 @@ public int ParentRow
///
/// 15 bits = 32K selections
///
- public int OperationReferenceId => _selectionSetFlagsAndParentLow & 0x7FFF;
+ public int OperationReferenceId => _selectionAndFlags & 0x7FFF;
///
/// Element metadata flags.
@@ -138,7 +148,7 @@ public int ParentRow
///
/// 6 bits = 64 combinations
///
- public ElementFlags Flags => (ElementFlags)((_selectionSetFlagsAndParentLow >> 15) & 0x3F);
+ public ElementFlags Flags => (ElementFlags)((_selectionAndFlags >> 17) & 0x3F);
///
/// True for primitive JSON values (strings, numbers, booleans, null).
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.MetaDb.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.MetaDb.cs
index 4e6ce57c80b..1442ef6dc8d 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.MetaDb.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.MetaDb.cs
@@ -2,6 +2,7 @@
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
+using System.Runtime.Intrinsics;
using HotChocolate.Buffers;
using static HotChocolate.Fusion.Text.Json.MetaDbEventSource;
@@ -11,7 +12,6 @@ public sealed partial class CompositeResultDocument
{
internal struct MetaDb : IDisposable
{
- private const int TokenTypeOffset = 8;
private static readonly ArrayPool s_arrayPool = ArrayPool.Shared;
private byte[][] _chunks;
@@ -56,73 +56,324 @@ internal Cursor Append(
int numberOfRows = 0,
ElementFlags flags = ElementFlags.None)
{
- var log = Log;
+ var (chunk, byteOffset, cursor) = ReserveRow();
+
+ var row = new DbRow(
+ tokenType,
+ location,
+ sizeOrLength,
+ sourceDocumentId,
+ parentRow,
+ operationReferenceId,
+ operationReferenceType,
+ numberOfRows,
+ flags);
+
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref dest, byteOffset), row);
+
+ _next = cursor + 1;
+ return cursor;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal Cursor AppendNull(int parentRow)
+ {
+ Debug.Assert(parentRow is >= 0 and <= 0x0FFFFFFF);
+
+ var (chunk, byteOffset, cursor) = ReserveRow();
+
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ ref var row = ref Unsafe.Add(ref dest, byteOffset);
+
+ // int 0: TokenType=None(0) + parentRow in high 28 bits
+ Unsafe.WriteUnaligned(ref row, parentRow << 4);
+ // ints 1..4 stamped zero via a single 16-byte vector store
+ Vector128.Zero.StoreUnsafe(ref Unsafe.Add(ref row, 4));
+
+ _next = cursor + 1;
+ return cursor;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal Cursor AppendEmptyProperty(int parentRow, int selectionId, ElementFlags flags)
+ {
+ Debug.Assert(parentRow is >= 0 and <= 0x0FFFFFFF);
+ Debug.Assert(selectionId is >= 0 and <= 0x7FFF);
+ Debug.Assert((byte)flags <= 63);
+
+ var (chunk, byteOffset, cursor) = ReserveRow();
+
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ ref var row = ref Unsafe.Add(ref dest, byteOffset);
+
+ // int 0: PropertyName token + parentRow
+ Unsafe.WriteUnaligned(
+ ref row,
+ (int)ElementTokenType.PropertyName | (parentRow << 4));
+
+ // int 1: selectionId + opRefType=Selection + flags
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 4),
+ selectionId
+ | ((int)OperationReferenceType.Selection << 15)
+ | ((int)flags << 17));
+
+ // ints 2..4 must be zero
+ Unsafe.InitBlockUnaligned(ref Unsafe.Add(ref row, 8), 0, 12);
+
+ _next = cursor + 1;
+ return cursor;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal Cursor AppendEmptyPropertyWithNullValue(int parentRow, int selectionId, ElementFlags flags)
+ {
+ Debug.Assert(parentRow is >= 0 and <= 0x0FFFFFFF);
+ Debug.Assert(selectionId is >= 0 and <= 0x7FFF);
+ Debug.Assert((byte)flags <= 63);
+
var next = _next;
- var chunkIndex = next.Chunk;
var byteOffset = next.ByteOffset;
+ var chunks = _chunks;
- var chunks = _chunks.AsSpan();
- var chunksLength = chunks.Length;
+ // Fast path: both rows fit in the current chunk.
+ if (byteOffset + (DbRow.Size * 2) <= Cursor.ChunkBytes
+ && (uint)next.Chunk < (uint)chunks.Length
+ && chunks[next.Chunk] is { Length: > 0 } chunk)
+ {
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ ref var row0 = ref Unsafe.Add(ref dest, byteOffset);
+
+ // Row 0 — PropertyName
+ Unsafe.WriteUnaligned(
+ ref row0,
+ (int)ElementTokenType.PropertyName | (parentRow << 4));
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row0, 4),
+ selectionId
+ | ((int)OperationReferenceType.Selection << 15)
+ | ((int)flags << 17));
+ Unsafe.InitBlockUnaligned(ref Unsafe.Add(ref row0, 8), 0, 12);
+
+ // Row 1 — None value with parent = index of Row 0 (= next.Index)
+ ref var row1 = ref Unsafe.Add(ref row0, DbRow.Size);
+ Unsafe.WriteUnaligned(ref row1, next.Index << 4);
+ Vector128.Zero.StoreUnsafe(ref Unsafe.Add(ref row1, 4));
+
+ _next = next + 2;
+ return next;
+ }
- if (byteOffset + DbRow.Size > Cursor.ChunkBytes)
+ // Slow path — crosses chunk boundary or chunk not yet rented
+ var propCursor = AppendEmptyProperty(parentRow, selectionId, flags);
+ AppendNull(propCursor.Index);
+ return propCursor;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal Cursor AppendStartObject(int parentRow, int selectionSetId, int propertyCount, ElementFlags flags)
+ {
+ Debug.Assert(parentRow is >= 0 and <= 0x0FFFFFFF);
+ Debug.Assert(selectionSetId is >= 0 and <= 0x7FFF);
+ Debug.Assert(propertyCount is >= 0 and <= 0x03FFFFFF); // room for (count*2)+1 in 27 bits
+ Debug.Assert((byte)flags <= 63);
+
+ var (chunk, byteOffset, cursor) = ReserveRow();
+
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ ref var row = ref Unsafe.Add(ref dest, byteOffset);
+
+ // int 0: token + parent
+ Unsafe.WriteUnaligned(
+ ref row,
+ (int)ElementTokenType.StartObject | (parentRow << 4));
+
+ // int 1: selectionSetId + SelectionSet + flags
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 4),
+ selectionSetId
+ | ((int)OperationReferenceType.SelectionSet << 15)
+ | ((int)flags << 17));
+
+ // int 2: sizeOrLength = property count
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 8), propertyCount);
+
+ // int 3: numberOfRows = (count * 2) + 1 (1 property = 2 rows: name + value)
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref row, 12),
+ ((propertyCount * 2) + 1) & 0x07FFFFFF);
+
+ // int 4: zero
+ Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+
+ _next = cursor + 1;
+ return cursor;
+ }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        internal Cursor AppendStartArray(int parentRow, int length, ElementFlags flags)
+        {
+            Debug.Assert(parentRow is >= 0 and <= 0x0FFFFFFF);
+            Debug.Assert(length is >= 0 and <= 0x07FFFFFE); // (length + 1) must fit the 27-bit rows field
+            Debug.Assert((byte)flags <= 63);
+
+            var (chunk, byteOffset, cursor) = ReserveRow();
+
+            ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+            ref var row = ref Unsafe.Add(ref dest, byteOffset);
+
+            // int 0: token + parent
+            Unsafe.WriteUnaligned(
+                ref row,
+                (int)ElementTokenType.StartArray | (parentRow << 4));
+
+            // int 1: flags only (no OpRefId / no OpRefType for arrays)
+            Unsafe.WriteUnaligned(
+                ref Unsafe.Add(ref row, 4),
+                (int)flags << 17);
+
+            // int 2: sizeOrLength = length
+            Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 8), length);
+
+            // int 3: numberOfRows = length + 1 (truncation now guarded by the assert above)
+            Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 12), (length + 1) & 0x07FFFFFF);
+
+            // int 4: zero
+            Unsafe.WriteUnaligned(ref Unsafe.Add(ref row, 16), 0);
+
+            _next = cursor + 1;
+            return cursor;
+        }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal Cursor AppendEndObject()
+ {
+ var (chunk, byteOffset, cursor) = ReserveRow();
+
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ ref var row = ref Unsafe.Add(ref dest, byteOffset);
+
+ Unsafe.WriteUnaligned(ref row, (int)ElementTokenType.EndObject);
+ Unsafe.InitBlockUnaligned(ref Unsafe.Add(ref row, 4), 0, 16);
+
+ _next = cursor + 1;
+ return cursor;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal Cursor AppendEndArray()
+ {
+ var (chunk, byteOffset, cursor) = ReserveRow();
+
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ ref var row = ref Unsafe.Add(ref dest, byteOffset);
+
+ Unsafe.WriteUnaligned(ref row, (int)ElementTokenType.EndArray);
+ Unsafe.InitBlockUnaligned(ref Unsafe.Add(ref row, 4), 0, 16);
+
+ _next = cursor + 1;
+ return cursor;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ internal void AppendNullRange(int parentRow, int count)
+ {
+ Debug.Assert(parentRow is >= 0 and <= 0x0FFFFFFF);
+ Debug.Assert(count >= 0);
+
+ if (count == 0)
{
- chunkIndex++;
- byteOffset = 0;
- next = Cursor.FromByteOffset(chunkIndex, byteOffset);
+ return;
}
- // make sure we have enough space for the chunk referenced by the chunkIndex.
- if (chunkIndex >= chunksLength)
+ var next = _next;
+ var byteOffset = next.ByteOffset;
+ var bytesNeeded = count * DbRow.Size;
+
+ // Fast path: all rows fit in the current chunk.
+ if (byteOffset + bytesNeeded <= Cursor.ChunkBytes
+ && next.Chunk < _chunks.Length
+ && _chunks[next.Chunk].Length > 0)
{
- // if we do not have enough space we will double the size we have for
- // chunks of memory.
- var nextChunksLength = chunksLength * 2;
- var newChunks = s_arrayPool.Rent(nextChunksLength);
- log.ChunksExpanded(2, chunksLength, nextChunksLength);
+ var chunk = _chunks[next.Chunk];
+
+ ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
+ ref var region = ref Unsafe.Add(ref dest, byteOffset);
- // copy chunks to new buffer
- Array.Copy(_chunks, newChunks, chunksLength);
+ // Zero the whole range once, then stamp parentRow into int 0 of each row.
+ Unsafe.InitBlockUnaligned(ref region, 0, (uint)bytesNeeded);
- for (var i = chunksLength; i < nextChunksLength; i++)
+ var parentPacked = parentRow << 4;
+ for (var i = 0; i < count; i++)
{
- newChunks[i] = [];
+ Unsafe.WriteUnaligned(
+ ref Unsafe.Add(ref region, i * DbRow.Size),
+ parentPacked);
}
- // clear and return old chunks buffer
- chunks.Clear();
- s_arrayPool.Return(_chunks);
+ _next = next + count;
+ return;
+ }
- // assign new chunks buffer
- _chunks = newChunks;
- chunks = newChunks.AsSpan();
+ // Slow path: crosses chunk boundary or chunk not yet rented.
+ for (var i = 0; i < count; i++)
+ {
+ AppendNull(parentRow);
}
+ }
- var chunk = chunks[chunkIndex];
+ ///
+ /// Reserves the next row slot, advancing to a new chunk if necessary. Does not
+ /// advance ; the caller updates it after writing the row.
+ ///
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private (byte[] chunk, int byteOffset, Cursor cursor) ReserveRow()
+ {
+ var next = _next;
+ var chunkIndex = next.Chunk;
+ var byteOffset = next.ByteOffset;
+
+ if (byteOffset + DbRow.Size > Cursor.ChunkBytes)
+ {
+ chunkIndex++;
+ byteOffset = 0;
+ next = Cursor.FromByteOffset(chunkIndex, byteOffset);
+ }
- // if the chunk is empty we did not yet rent any memory for it
+ var chunks = _chunks.AsSpan();
+ if (chunkIndex >= chunks.Length)
+ {
+ GrowChunks(chunks.Length);
+ chunks = _chunks.AsSpan();
+ }
+
+ var chunk = chunks[chunkIndex];
if (chunk.Length == 0)
{
chunk = chunks[chunkIndex] = JsonMemory.Rent(JsonMemoryKind.Metadata);
- log.ChunkAllocated(2, chunkIndex);
+ Log.ChunkAllocated(2, chunkIndex);
}
- var row = new DbRow(
- tokenType,
- location,
- sizeOrLength,
- sourceDocumentId,
- parentRow,
- operationReferenceId,
- operationReferenceType,
- numberOfRows,
- flags);
+ return (chunk, byteOffset, next);
+ }
- ref var dest = ref MemoryMarshal.GetArrayDataReference(chunk);
- Unsafe.WriteUnaligned(ref Unsafe.Add(ref dest, byteOffset), row);
+ private void GrowChunks(int currentLength)
+ {
+ var nextLength = currentLength * 2;
+ var newChunks = s_arrayPool.Rent(nextLength);
+ Log.ChunksExpanded(2, currentLength, nextLength);
+
+ Array.Copy(_chunks, newChunks, currentLength);
+
+ for (var i = currentLength; i < newChunks.Length; i++)
+ {
+ newChunks[i] = [];
+ }
- // Advance write head by one row
- _next = next + 1;
- return next;
+ _chunks.AsSpan().Clear();
+ s_arrayPool.Return(_chunks);
+ _chunks = newChunks;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -187,16 +438,16 @@ internal DbRow GetValue(ref Cursor cursor)
var chunks = _chunks.AsSpan();
var span = chunks[cursor.Chunk].AsSpan(cursor.ByteOffset);
- var union = MemoryMarshal.Read(span[TokenTypeOffset..]);
- var tokenType = (ElementTokenType)(union >> 28);
+ var typeAndParent = MemoryMarshal.Read(span);
+ var tokenType = (ElementTokenType)(typeAndParent & 0x0F);
if (tokenType is ElementTokenType.Reference)
{
- var index = MemoryMarshal.Read(span) & 0x07FFFFFF;
+ var index = MemoryMarshal.Read(span[DbRow.LocationOrRowsOffset..]) & 0x07FFFFFF;
cursor = Cursor.FromIndex(index);
- span = chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + TokenTypeOffset);
- union = MemoryMarshal.Read(span);
- tokenType = (ElementTokenType)(union >> 28);
+ span = chunks[cursor.Chunk].AsSpan(cursor.ByteOffset);
+ typeAndParent = MemoryMarshal.Read(span);
+ tokenType = (ElementTokenType)(typeAndParent & 0x0F);
}
return (cursor, tokenType);
@@ -207,10 +458,10 @@ internal int GetLocation(Cursor cursor)
{
AssertValidCursor(cursor);
- var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset);
+ var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.LocationOrRowsOffset);
- var locationAndOpRefType = MemoryMarshal.Read(span);
- return locationAndOpRefType & 0x07FFFFFF;
+ var locationOrRows = MemoryMarshal.Read(span);
+ return locationOrRows & 0x07FFFFFF;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -218,10 +469,10 @@ internal Cursor GetLocationCursor(Cursor cursor)
{
AssertValidCursor(cursor);
- var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset);
+ var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.LocationOrRowsOffset);
- var locationAndOpRefType = MemoryMarshal.Read(span);
- return Cursor.FromIndex(locationAndOpRefType & 0x07FFFFFF);
+ var locationOrRows = MemoryMarshal.Read(span);
+ return Cursor.FromIndex(locationOrRows & 0x07FFFFFF);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -231,11 +482,8 @@ internal int GetParent(Cursor cursor)
var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset);
- var sourceAndParentHigh = MemoryMarshal.Read(span[12..]);
- var selectionSetFlagsAndParentLow = MemoryMarshal.Read(span[16..]);
-
- return (sourceAndParentHigh >>> 15 << 11)
- | ((selectionSetFlagsAndParentLow >> 21) & 0x7FF);
+ var typeAndParent = MemoryMarshal.Read(span);
+ return (int)((uint)typeAndParent >> 4);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -245,11 +493,8 @@ internal Cursor GetParentCursor(Cursor cursor)
var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset);
- var sourceAndParentHigh = MemoryMarshal.Read(span[12..]);
- var selectionSetFlagsAndParentLow = MemoryMarshal.Read(span[16..]);
-
- var index = (sourceAndParentHigh >>> 15 << 11)
- | ((selectionSetFlagsAndParentLow >> 21) & 0x7FF);
+ var typeAndParent = MemoryMarshal.Read(span);
+ var index = (int)((uint)typeAndParent >> 4);
return Cursor.FromIndex(index);
}
@@ -259,10 +504,11 @@ internal int GetNumberOfRows(Cursor cursor)
{
AssertValidCursor(cursor);
- var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + TokenTypeOffset);
+ // NumberOfRows shares storage with Location in int 3.
+ var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.LocationOrRowsOffset);
var value = MemoryMarshal.Read(span);
- return value & 0x0FFFFFFF;
+ return value & 0x07FFFFFF;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -270,10 +516,10 @@ internal ElementFlags GetFlags(Cursor cursor)
{
AssertValidCursor(cursor);
- var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + 16);
+ var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.SelectionAndFlagsOffset);
- var selectionSetFlagsAndParentLow = MemoryMarshal.Read(span);
- return (ElementFlags)((selectionSetFlagsAndParentLow >> 15) & 0x3F);
+ var selectionAndFlags = MemoryMarshal.Read(span);
+ return (ElementFlags)((selectionAndFlags >> 17) & 0x3F);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -282,11 +528,12 @@ internal void SetFlags(Cursor cursor, ElementFlags flags)
AssertValidCursor(cursor);
Debug.Assert((byte)flags <= 63, "Flags value exceeds 6-bit limit");
- var fieldSpan = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + 16);
+ var fieldSpan = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.SelectionAndFlagsOffset);
var currentValue = MemoryMarshal.Read(fieldSpan);
- var clearedValue = currentValue & 0xFFE07FFF; // ~(0x3F << 15)
- var newValue = (int)(clearedValue | (uint)((int)flags << 15));
+ // Clear bits 17..22 (6-bit Flags region) then OR new flags in.
+ var clearedValue = (int)((uint)currentValue & ~(0x3Fu << 17));
+ var newValue = clearedValue | ((int)flags << 17);
MemoryMarshal.Write(fieldSpan, newValue);
}
@@ -296,7 +543,7 @@ internal int GetSizeOrLength(Cursor cursor)
{
AssertValidCursor(cursor);
- var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + 4);
+ var span = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.SizeOffset);
var value = MemoryMarshal.Read(span);
return value & int.MaxValue;
@@ -308,7 +555,7 @@ internal void SetSizeOrLength(Cursor cursor, int sizeOrLength)
AssertValidCursor(cursor);
Debug.Assert(sizeOrLength >= 0, "SizeOrLength value exceeds 31-bit limit");
- var fieldSpan = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + 4);
+ var fieldSpan = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.SizeOffset);
var currentValue = MemoryMarshal.Read(fieldSpan);
// Keep only the sign bit (HasComplexChildren)
@@ -322,14 +569,15 @@ internal void SetSizeOrLength(Cursor cursor, int sizeOrLength)
internal void SetNumberOfRows(Cursor cursor, int numberOfRows)
{
AssertValidCursor(cursor);
- Debug.Assert(numberOfRows >= 0 && numberOfRows <= 0x0FFFFFFF, "NumberOfRows value exceeds 28-bit limit");
+ Debug.Assert(numberOfRows >= 0 && numberOfRows <= 0x07FFFFFF, "NumberOfRows value exceeds 27-bit limit");
- var fieldSpan = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + TokenTypeOffset);
+ // NumberOfRows shares storage with Location in int 3. Preserve the 5 reserved
+ // high bits and write the 27-bit value into the low bits.
+ var fieldSpan = _chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + DbRow.LocationOrRowsOffset);
var currentValue = MemoryMarshal.Read(fieldSpan);
- // Keep only the top 4 bits (token type)
- var clearedValue = currentValue & unchecked((int)0xF0000000);
- var newValue = clearedValue | (numberOfRows & 0x0FFFFFFF);
+ var clearedValue = (int)((uint)currentValue & 0xF8000000u);
+ var newValue = clearedValue | (numberOfRows & 0x07FFFFFF);
MemoryMarshal.Write(fieldSpan, newValue);
}
@@ -339,15 +587,15 @@ internal ElementTokenType GetElementTokenType(Cursor cursor, bool resolveReferen
{
AssertValidCursor(cursor);
- var union = MemoryMarshal.Read(_chunks[cursor.Chunk].AsSpan(cursor.ByteOffset + TokenTypeOffset));
- var tokenType = (ElementTokenType)(union >> 28);
+ var typeAndParent = MemoryMarshal.Read(_chunks[cursor.Chunk].AsSpan(cursor.ByteOffset));
+ var tokenType = (ElementTokenType)(typeAndParent & 0x0F);
if (resolveReferences && tokenType == ElementTokenType.Reference)
{
var idx = GetLocation(cursor);
var resolved = Cursor.FromIndex(idx);
- union = MemoryMarshal.Read(_chunks[resolved.Chunk].AsSpan(resolved.ByteOffset + TokenTypeOffset));
- tokenType = (ElementTokenType)(union >> 28);
+ typeAndParent = MemoryMarshal.Read(_chunks[resolved.Chunk].AsSpan(resolved.ByteOffset));
+ tokenType = (ElementTokenType)(typeAndParent & 0x0F);
}
return tokenType;
diff --git a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs
index 314461f23cd..89427e67fc1 100644
--- a/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs
+++ b/src/HotChocolate/Fusion/src/Fusion.Execution/Text/Json/CompositeResultDocument.cs
@@ -341,16 +341,15 @@ private ReadOnlySpan ReadRawValue(DbRow row)
internal CompositeResultElement CreateObject(Cursor parent, SelectionSet selectionSet)
{
- var startObjectCursor = WriteStartObject(parent, selectionSet.Id);
+ var selections = selectionSet.Selections;
+ var startObjectCursor = WriteStartObject(parent, selectionSet.Id, selections.Length);
- var selectionCount = 0;
- foreach (var selection in selectionSet.Selections)
+ foreach (var selection in selections)
{
WriteEmptyProperty(startObjectCursor, selection);
- selectionCount++;
}
- WriteEndObject(startObjectCursor, selectionCount);
+ _metaDb.AppendEndObject();
return new CompositeResultElement(this, startObjectCursor);
}
@@ -359,10 +358,7 @@ internal CompositeResultElement CreateArray(Cursor parent, int length)
{
var cursor = WriteStartArray(parent, length);
- for (var i = 0; i < length; i++)
- {
- WriteEmptyValue(cursor);
- }
+ _metaDb.AppendNullRange(cursor.Index, length);
WriteEndArray();
@@ -431,7 +427,7 @@ internal void AssignNullValue(CompositeResultElement target)
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
- private Cursor WriteStartObject(Cursor parent, int selectionSetId = 0)
+ private Cursor WriteStartObject(Cursor parent, int selectionSetId, int propertyCount)
{
var flags = ElementFlags.None;
var parentRow = parent.Index;
@@ -442,21 +438,7 @@ private Cursor WriteStartObject(Cursor parent, int selectionSetId = 0)
flags = ElementFlags.IsRoot;
}
- return _metaDb.Append(
- ElementTokenType.StartObject,
- parentRow: parentRow,
- operationReferenceId: selectionSetId,
- operationReferenceType: OperationReferenceType.SelectionSet,
- flags: flags);
- }
-
- [MethodImpl(MethodImplOptions.AggressiveInlining)]
- private void WriteEndObject(Cursor startObjectCursor, int length)
- {
- _metaDb.Append(ElementTokenType.EndObject);
-
- _metaDb.SetNumberOfRows(startObjectCursor, (length * 2) + 1);
- _metaDb.SetSizeOrLength(startObjectCursor, length);
+ return _metaDb.AppendStartObject(parentRow, selectionSetId, propertyCount, flags);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
@@ -471,16 +453,11 @@ private Cursor WriteStartArray(Cursor parent, int length = 0)
flags = ElementFlags.IsRoot;
}
- return _metaDb.Append(
- ElementTokenType.StartArray,
- sizeOrLength: length,
- parentRow: parentRow,
- numberOfRows: length + 1,
- flags: flags);
+ return _metaDb.AppendStartArray(parentRow, length, flags);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
- private void WriteEndArray() => _metaDb.Append(ElementTokenType.EndArray);
+ private void WriteEndArray() => _metaDb.AppendEndArray();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private void WriteEmptyProperty(Cursor parent, Selection selection)
@@ -502,25 +479,14 @@ private void WriteEmptyProperty(Cursor parent, Selection selection)
flags |= ElementFlags.IsNullable;
}
- var prop = _metaDb.Append(
- ElementTokenType.PropertyName,
+ _metaDb.AppendEmptyPropertyWithNullValue(
parentRow: parent.Index,
- operationReferenceId: selection.Id,
- operationReferenceType: OperationReferenceType.Selection,
+ selectionId: selection.Id,
flags: flags);
-
- _metaDb.Append(
- ElementTokenType.None,
- parentRow: prop.Index);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
- private void WriteEmptyValue(Cursor parent)
- {
- _metaDb.Append(
- ElementTokenType.None,
- parentRow: parent.Index);
- }
+ private void WriteEmptyValue(Cursor parent) => _metaDb.AppendNull(parent.Index);
private static void CheckExpectedType(ElementTokenType expected, ElementTokenType actual)
{
diff --git a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Text/Json/CompositeResultDocumentMetaDbTests.cs b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Text/Json/CompositeResultDocumentMetaDbTests.cs
index a2c39cf6d70..ab39cedca2f 100644
--- a/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Text/Json/CompositeResultDocumentMetaDbTests.cs
+++ b/src/HotChocolate/Fusion/test/Fusion.Execution.Tests/Text/Json/CompositeResultDocumentMetaDbTests.cs
@@ -307,5 +307,569 @@ public void Append_ExceedsInitialChunkCapacity_ExpandsChunkArray()
Assert.Equal((totalRowsToAdd - 1) % 100, lastRow.SizeOrLength);
}
+ [Fact]
+ public void Append_StoresAndReadsNumberOfRows()
+ {
+ // Arrange & Act
+ var index = _metaDb.Append(
+ ElementTokenType.StartObject,
+ sizeOrLength: 3,
+ parentRow: 10,
+ numberOfRows: 7);
+
+ // Assert
+ var row = _metaDb.Get(index);
+ Assert.Equal(ElementTokenType.StartObject, row.TokenType);
+ Assert.Equal(3, row.SizeOrLength);
+ Assert.Equal(10, row.ParentRow);
+ Assert.Equal(7, row.NumberOfRows);
+ }
+
+ [Fact]
+ public void Append_WithMaxNumberOfRows_StoresCorrectly()
+ {
+ // Arrange
+ const int maxNumberOfRows = 0x07FFFFFF; // 27 bits
+
+ // Act
+ var index = _metaDb.Append(
+ ElementTokenType.StartArray,
+ numberOfRows: maxNumberOfRows);
+
+ // Assert
+ var row = _metaDb.Get(index);
+ Assert.Equal(maxNumberOfRows, row.NumberOfRows);
+ Assert.Equal(ElementTokenType.StartArray, row.TokenType);
+ }
+
+ [Fact]
+ public void SetNumberOfRows_UpdatesValueWithoutAffectingOtherFields()
+ {
+ // Arrange
+ var index = _metaDb.Append(
+ ElementTokenType.StartObject,
+ sizeOrLength: 5,
+ parentRow: 100,
+ operationReferenceId: 42,
+ flags: ElementFlags.IsRoot);
+
+ // Act
+ _metaDb.SetNumberOfRows(index, 11);
+
+ // Assert — NumberOfRows updated, other fields preserved
+ var row = _metaDb.Get(index);
+ Assert.Equal(11, row.NumberOfRows);
+ Assert.Equal(ElementTokenType.StartObject, row.TokenType);
+ Assert.Equal(5, row.SizeOrLength);
+ Assert.Equal(100, row.ParentRow);
+ Assert.Equal(42, row.OperationReferenceId);
+ Assert.Equal(ElementFlags.IsRoot, row.Flags);
+ }
+
+ [Fact]
+ public void SetFlags_UpdatesValueWithoutAffectingOtherFields()
+ {
+ // Arrange
+ var index = _metaDb.Append(
+ ElementTokenType.PropertyName,
+ parentRow: 100,
+ operationReferenceId: 42,
+ operationReferenceType: OperationReferenceType.Selection,
+ flags: ElementFlags.None);
+
+ // Act
+ _metaDb.SetFlags(index, ElementFlags.IsNullable | ElementFlags.IsRoot);
+
+ // Assert — Flags updated, other fields preserved
+ var row = _metaDb.Get(index);
+ Assert.Equal(ElementFlags.IsNullable | ElementFlags.IsRoot, row.Flags);
+ Assert.Equal(ElementTokenType.PropertyName, row.TokenType);
+ Assert.Equal(100, row.ParentRow);
+ Assert.Equal(42, row.OperationReferenceId);
+ Assert.Equal(OperationReferenceType.Selection, row.OperationReferenceType);
+ }
+
+ [Fact]
+ public void SetSizeOrLength_UpdatesValueWithoutAffectingOtherFields()
+ {
+ // Arrange
+ var index = _metaDb.Append(
+ ElementTokenType.StartObject,
+ sizeOrLength: 0,
+ parentRow: 7,
+ numberOfRows: 9);
+
+ // Act
+ _metaDb.SetSizeOrLength(index, 15);
+
+ // Assert
+ var row = _metaDb.Get(index);
+ Assert.Equal(15, row.SizeOrLength);
+ Assert.Equal(ElementTokenType.StartObject, row.TokenType);
+ Assert.Equal(7, row.ParentRow);
+ Assert.Equal(9, row.NumberOfRows);
+ }
+
+ [Fact]
+ public void AppendNull_WritesTokenTypeNoneAndParent()
+ {
+ // Act
+ var cursor = _metaDb.AppendNull(parentRow: 42);
+
+ // Assert
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(ElementTokenType.None, row.TokenType);
+ Assert.Equal(42, row.ParentRow);
+ Assert.Equal(0, row.Location);
+ Assert.Equal(0, row.SizeOrLength);
+ Assert.Equal(0, row.SourceDocumentId);
+ Assert.Equal(0, row.OperationReferenceId);
+ Assert.Equal(ElementFlags.None, row.Flags);
+ Assert.Equal(OperationReferenceType.None, row.OperationReferenceType);
+ }
+
+ [Fact]
+ public void AppendNull_AdvancesCursor()
+ {
+ // Act
+ var c0 = _metaDb.AppendNull(0);
+ var c1 = _metaDb.AppendNull(c0.Index);
+ var c2 = _metaDb.AppendNull(c1.Index);
+
+ // Assert
+ Assert.Equal(0, c0.Index);
+ Assert.Equal(1, c1.Index);
+ Assert.Equal(2, c2.Index);
+ Assert.Equal(0, _metaDb.Get(c0).ParentRow);
+ Assert.Equal(c0.Index, _metaDb.Get(c1).ParentRow);
+ Assert.Equal(c1.Index, _metaDb.Get(c2).ParentRow);
+ }
+
+ [Fact]
+ public void AppendNull_IsEquivalentToGenericAppend()
+ {
+ // Arrange — compare specialized vs generic path
+ using var reference = MetaDb.CreateForEstimatedRows(10);
+ var refCursor = reference.Append(ElementTokenType.None, parentRow: 123);
+
+ // Act
+ var cursor = _metaDb.AppendNull(parentRow: 123);
+
+ // Assert — rows must be byte-for-byte identical
+ var refRow = reference.Get(refCursor);
+ var row = _metaDb.Get(cursor);
+
+ Assert.Equal(refRow.TokenType, row.TokenType);
+ Assert.Equal(refRow.ParentRow, row.ParentRow);
+ Assert.Equal(refRow.Location, row.Location);
+ Assert.Equal(refRow.SizeOrLength, row.SizeOrLength);
+ Assert.Equal(refRow.NumberOfRows, row.NumberOfRows);
+ Assert.Equal(refRow.SourceDocumentId, row.SourceDocumentId);
+ Assert.Equal(refRow.OperationReferenceId, row.OperationReferenceId);
+ Assert.Equal(refRow.OperationReferenceType, row.OperationReferenceType);
+ Assert.Equal(refRow.Flags, row.Flags);
+ }
+
+ [Fact]
+ public void AppendEmptyProperty_WritesAllFields()
+ {
+ // Act
+ var cursor = _metaDb.AppendEmptyProperty(
+ parentRow: 7,
+ selectionId: 99,
+ flags: ElementFlags.IsNullable | ElementFlags.IsInternal);
+
+ // Assert
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(ElementTokenType.PropertyName, row.TokenType);
+ Assert.Equal(7, row.ParentRow);
+ Assert.Equal(99, row.OperationReferenceId);
+ Assert.Equal(OperationReferenceType.Selection, row.OperationReferenceType);
+ Assert.Equal(ElementFlags.IsNullable | ElementFlags.IsInternal, row.Flags);
+ Assert.Equal(0, row.Location);
+ Assert.Equal(0, row.SizeOrLength);
+ Assert.Equal(0, row.NumberOfRows);
+ Assert.Equal(0, row.SourceDocumentId);
+ }
+
+ [Fact]
+ public void AppendEmptyProperty_WithNoFlags()
+ {
+ // Act
+ var cursor = _metaDb.AppendEmptyProperty(
+ parentRow: 0,
+ selectionId: 1,
+ flags: ElementFlags.None);
+
+ // Assert
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(ElementTokenType.PropertyName, row.TokenType);
+ Assert.Equal(0, row.ParentRow);
+ Assert.Equal(1, row.OperationReferenceId);
+ Assert.Equal(ElementFlags.None, row.Flags);
+ }
+
+ [Fact]
+ public void AppendEmptyProperty_IsEquivalentToGenericAppend()
+ {
+ // Arrange
+ using var reference = MetaDb.CreateForEstimatedRows(10);
+ var refCursor = reference.Append(
+ ElementTokenType.PropertyName,
+ parentRow: 13,
+ operationReferenceId: 77,
+ operationReferenceType: OperationReferenceType.Selection,
+ flags: ElementFlags.IsExcluded);
+
+ // Act
+ var cursor = _metaDb.AppendEmptyProperty(
+ parentRow: 13,
+ selectionId: 77,
+ flags: ElementFlags.IsExcluded);
+
+ // Assert
+ var refRow = reference.Get(refCursor);
+ var row = _metaDb.Get(cursor);
+
+ Assert.Equal(refRow.TokenType, row.TokenType);
+ Assert.Equal(refRow.ParentRow, row.ParentRow);
+ Assert.Equal(refRow.OperationReferenceId, row.OperationReferenceId);
+ Assert.Equal(refRow.OperationReferenceType, row.OperationReferenceType);
+ Assert.Equal(refRow.Flags, row.Flags);
+ Assert.Equal(refRow.Location, row.Location);
+ Assert.Equal(refRow.SizeOrLength, row.SizeOrLength);
+ Assert.Equal(refRow.NumberOfRows, row.NumberOfRows);
+ }
+
+ [Fact]
+ public void AppendEmptyPropertyWithNullValue_WritesTwoLinkedRows()
+ {
+ // Act
+ var propCursor = _metaDb.AppendEmptyPropertyWithNullValue(
+ parentRow: 5,
+ selectionId: 11,
+ flags: ElementFlags.IsNullable);
+
+ // Assert — PropertyName row
+ var propRow = _metaDb.Get(propCursor);
+ Assert.Equal(ElementTokenType.PropertyName, propRow.TokenType);
+ Assert.Equal(5, propRow.ParentRow);
+ Assert.Equal(11, propRow.OperationReferenceId);
+ Assert.Equal(OperationReferenceType.Selection, propRow.OperationReferenceType);
+ Assert.Equal(ElementFlags.IsNullable, propRow.Flags);
+
+ // Assert — None value row with parent = PropertyName cursor
+ var valueCursor = Cursor.FromIndex(propCursor.Index + 1);
+ var valueRow = _metaDb.Get(valueCursor);
+ Assert.Equal(ElementTokenType.None, valueRow.TokenType);
+ Assert.Equal(propCursor.Index, valueRow.ParentRow);
+ Assert.Equal(0, valueRow.Location);
+ Assert.Equal(0, valueRow.OperationReferenceId);
+ Assert.Equal(ElementFlags.None, valueRow.Flags);
+
+ // Cursor advanced by 2
+ Assert.Equal(propCursor.Index + 2, _metaDb.NextCursor.Index);
+ }
+
+ [Fact]
+ public void AppendEmptyPropertyWithNullValue_IsEquivalentToTwoGenericAppends()
+ {
+ // Arrange
+ using var reference = MetaDb.CreateForEstimatedRows(10);
+ var refProp = reference.Append(
+ ElementTokenType.PropertyName,
+ parentRow: 21,
+ operationReferenceId: 3,
+ operationReferenceType: OperationReferenceType.Selection,
+ flags: ElementFlags.IsInternal);
+ var refNull = reference.Append(
+ ElementTokenType.None,
+ parentRow: refProp.Index);
+
+ // Act
+ var propCursor = _metaDb.AppendEmptyPropertyWithNullValue(
+ parentRow: 21,
+ selectionId: 3,
+ flags: ElementFlags.IsInternal);
+ var valueCursor = Cursor.FromIndex(propCursor.Index + 1);
+
+ // Assert property rows match byte-for-byte
+ var refPropRow = reference.Get(refProp);
+ var propRow = _metaDb.Get(propCursor);
+ Assert.Equal(refPropRow.TokenType, propRow.TokenType);
+ Assert.Equal(refPropRow.ParentRow, propRow.ParentRow);
+ Assert.Equal(refPropRow.OperationReferenceId, propRow.OperationReferenceId);
+ Assert.Equal(refPropRow.OperationReferenceType, propRow.OperationReferenceType);
+ Assert.Equal(refPropRow.Flags, propRow.Flags);
+
+ // Assert value rows match byte-for-byte
+ var refNullRow = reference.Get(refNull);
+ var valueRow = _metaDb.Get(valueCursor);
+ Assert.Equal(refNullRow.TokenType, valueRow.TokenType);
+ Assert.Equal(refNullRow.ParentRow, valueRow.ParentRow);
+ }
+
+ [Fact]
+ public void AppendEmptyPropertyWithNullValue_FallsBackAcrossChunkBoundary()
+ {
+ // Arrange — fill current chunk so only 1 row fits, forcing slow path.
+ using var metaDb = MetaDb.CreateForEstimatedRows(4);
+ const int rowsPerChunk = 128 * 1024 / 20;
+
+ // Fill all but one slot in the first chunk
+ for (var i = 0; i < rowsPerChunk - 1; i++)
+ {
+ metaDb.AppendNull(0);
+ }
+
+ // Act — pair cannot fit, so the slow path runs (single-row Append + AppendNull).
+ var propCursor = metaDb.AppendEmptyPropertyWithNullValue(
+ parentRow: 0,
+ selectionId: 7,
+ flags: ElementFlags.None);
+ var valueCursor = Cursor.FromIndex(propCursor.Index + 1);
+
+ // Assert — correctness preserved across boundary
+ var propRow = metaDb.Get(propCursor);
+ var valueRow = metaDb.Get(valueCursor);
+ Assert.Equal(ElementTokenType.PropertyName, propRow.TokenType);
+ Assert.Equal(7, propRow.OperationReferenceId);
+ Assert.Equal(ElementTokenType.None, valueRow.TokenType);
+ Assert.Equal(propCursor.Index, valueRow.ParentRow);
+ }
+
+ [Fact]
+ public void AppendNull_FollowsChunkBoundary()
+ {
+ // Arrange — fill the first chunk and verify AppendNull keeps advancing into chunk 2.
+ using var metaDb = MetaDb.CreateForEstimatedRows(4);
+ const int rowsPerChunk = 128 * 1024 / 20;
+
+ for (var i = 0; i < rowsPerChunk + 5; i++)
+ {
+ metaDb.AppendNull(i);
+ }
+
+ // Assert — every row readable with expected parent
+ for (var i = 0; i < rowsPerChunk + 5; i++)
+ {
+ var row = metaDb.Get(Cursor.FromIndex(i));
+ Assert.Equal(ElementTokenType.None, row.TokenType);
+ Assert.Equal(i, row.ParentRow);
+ }
+ }
+
+ [Fact]
+ public void AppendStartObject_WritesAllFields()
+ {
+ // Act
+ var cursor = _metaDb.AppendStartObject(
+ parentRow: 5,
+ selectionSetId: 42,
+ propertyCount: 3,
+ flags: ElementFlags.IsRoot);
+
+ // Assert
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(ElementTokenType.StartObject, row.TokenType);
+ Assert.Equal(5, row.ParentRow);
+ Assert.Equal(42, row.OperationReferenceId);
+ Assert.Equal(OperationReferenceType.SelectionSet, row.OperationReferenceType);
+ Assert.Equal(ElementFlags.IsRoot, row.Flags);
+ Assert.Equal(3, row.SizeOrLength);
+ Assert.Equal(7, row.NumberOfRows);
+ Assert.Equal(0, row.SourceDocumentId);
+ }
+
+ [Fact]
+ public void AppendStartObject_IsEquivalentToGenericAppend()
+ {
+ // Arrange
+ using var reference = MetaDb.CreateForEstimatedRows(10);
+ var refCursor = reference.Append(
+ ElementTokenType.StartObject,
+ sizeOrLength: 4,
+ parentRow: 11,
+ operationReferenceId: 7,
+ operationReferenceType: OperationReferenceType.SelectionSet,
+ numberOfRows: 9,
+ flags: ElementFlags.IsInternal);
+
+ // Act
+ var cursor = _metaDb.AppendStartObject(
+ parentRow: 11,
+ selectionSetId: 7,
+ propertyCount: 4,
+ flags: ElementFlags.IsInternal);
+
+ // Assert
+ var refRow = reference.Get(refCursor);
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(refRow.TokenType, row.TokenType);
+ Assert.Equal(refRow.ParentRow, row.ParentRow);
+ Assert.Equal(refRow.OperationReferenceId, row.OperationReferenceId);
+ Assert.Equal(refRow.OperationReferenceType, row.OperationReferenceType);
+ Assert.Equal(refRow.Flags, row.Flags);
+ Assert.Equal(refRow.SizeOrLength, row.SizeOrLength);
+ Assert.Equal(refRow.NumberOfRows, row.NumberOfRows);
+ }
+
+ [Fact]
+ public void AppendStartArray_WritesAllFields()
+ {
+ // Act
+ var cursor = _metaDb.AppendStartArray(
+ parentRow: 3,
+ length: 10,
+ flags: ElementFlags.IsNullable);
+
+ // Assert
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(ElementTokenType.StartArray, row.TokenType);
+ Assert.Equal(3, row.ParentRow);
+ Assert.Equal(10, row.SizeOrLength);
+ Assert.Equal(11, row.NumberOfRows);
+ Assert.Equal(ElementFlags.IsNullable, row.Flags);
+ Assert.Equal(0, row.OperationReferenceId);
+ Assert.Equal(OperationReferenceType.None, row.OperationReferenceType);
+ }
+
+ [Fact]
+ public void AppendStartArray_IsEquivalentToGenericAppend()
+ {
+ // Arrange
+ using var reference = MetaDb.CreateForEstimatedRows(10);
+ var refCursor = reference.Append(
+ ElementTokenType.StartArray,
+ sizeOrLength: 7,
+ parentRow: 2,
+ numberOfRows: 8,
+ flags: ElementFlags.IsRoot);
+
+ // Act
+ var cursor = _metaDb.AppendStartArray(
+ parentRow: 2,
+ length: 7,
+ flags: ElementFlags.IsRoot);
+
+ // Assert
+ var refRow = reference.Get(refCursor);
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(refRow.TokenType, row.TokenType);
+ Assert.Equal(refRow.ParentRow, row.ParentRow);
+ Assert.Equal(refRow.SizeOrLength, row.SizeOrLength);
+ Assert.Equal(refRow.NumberOfRows, row.NumberOfRows);
+ Assert.Equal(refRow.Flags, row.Flags);
+ }
+
+ [Fact]
+ public void AppendEndObject_WritesOnlyTokenType()
+ {
+ // Act
+ var cursor = _metaDb.AppendEndObject();
+
+ // Assert
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(ElementTokenType.EndObject, row.TokenType);
+ Assert.Equal(0, row.ParentRow);
+ Assert.Equal(0, row.SizeOrLength);
+ Assert.Equal(0, row.NumberOfRows);
+ Assert.Equal(0, row.Location);
+ Assert.Equal(0, row.OperationReferenceId);
+ Assert.Equal(ElementFlags.None, row.Flags);
+ Assert.Equal(0, row.SourceDocumentId);
+ }
+
+ [Fact]
+ public void AppendEndArray_WritesOnlyTokenType()
+ {
+ // Act
+ var cursor = _metaDb.AppendEndArray();
+
+ // Assert
+ var row = _metaDb.Get(cursor);
+ Assert.Equal(ElementTokenType.EndArray, row.TokenType);
+ Assert.Equal(0, row.ParentRow);
+ Assert.Equal(0, row.SizeOrLength);
+ Assert.Equal(0, row.NumberOfRows);
+ }
+
+ [Fact]
+ public void AppendNullRange_WithZeroCount_IsNoOp()
+ {
+ // Act
+ _metaDb.AppendNullRange(parentRow: 5, count: 0);
+
+ // Assert
+ Assert.Equal(0, _metaDb.NextCursor.Index);
+ }
+
+ [Fact]
+ public void AppendNullRange_WritesNLinkedRows()
+ {
+ // Act
+ _metaDb.AppendNullRange(parentRow: 9, count: 5);
+
+ // Assert
+ Assert.Equal(5, _metaDb.NextCursor.Index);
+ for (var i = 0; i < 5; i++)
+ {
+ var row = _metaDb.Get(Cursor.FromIndex(i));
+ Assert.Equal(ElementTokenType.None, row.TokenType);
+ Assert.Equal(9, row.ParentRow);
+ Assert.Equal(0, row.Location);
+ Assert.Equal(0, row.SizeOrLength);
+ Assert.Equal(ElementFlags.None, row.Flags);
+ }
+ }
+
+ [Fact]
+ public void AppendNullRange_IsEquivalentToLoopOfAppendNull()
+ {
+ // Arrange
+ using var reference = MetaDb.CreateForEstimatedRows(10);
+ for (var i = 0; i < 7; i++)
+ {
+ reference.AppendNull(13);
+ }
+
+ // Act
+ _metaDb.AppendNullRange(parentRow: 13, count: 7);
+
+ // Assert — rows match index-for-index
+ for (var i = 0; i < 7; i++)
+ {
+ var refRow = reference.Get(Cursor.FromIndex(i));
+ var row = _metaDb.Get(Cursor.FromIndex(i));
+ Assert.Equal(refRow.TokenType, row.TokenType);
+ Assert.Equal(refRow.ParentRow, row.ParentRow);
+ }
+ }
+
+ [Fact]
+ public void AppendNullRange_FallsBackAcrossChunkBoundary()
+ {
+ // Arrange — fill most of the first chunk so the range crosses into chunk 2.
+ using var metaDb = MetaDb.CreateForEstimatedRows(4);
+ const int rowsPerChunk = 128 * 1024 / 20;
+
+ for (var i = 0; i < rowsPerChunk - 3; i++)
+ {
+ metaDb.AppendNull(0);
+ }
+
+ // Act — request 10 rows; only 3 fit in the current chunk, so slow path runs.
+ metaDb.AppendNullRange(parentRow: 42, count: 10);
+
+ // Assert — all 10 rows written correctly across the boundary.
+ const int firstNullRangeIndex = rowsPerChunk - 3;
+ for (var i = 0; i < 10; i++)
+ {
+ var row = metaDb.Get(Cursor.FromIndex(firstNullRangeIndex + i));
+ Assert.Equal(ElementTokenType.None, row.TokenType);
+ Assert.Equal(42, row.ParentRow);
+ }
+ }
+
public void Dispose() => _metaDb.Dispose();
}