diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml
new file mode 100644
index 000000000..c3fc4c907
--- /dev/null
+++ b/.github/workflows/benchmarks.yml
@@ -0,0 +1,38 @@
+name: benchmarks
+
+on:
+ workflow_dispatch:
+ pull_request:
+ push:
+
+permissions:
+ contents: read
+
+env:
+ DOTNET_NOLOGO: 1
+ DOTNET_CLI_TELEMETRY_OPTOUT: 1
+ DOTNET_GENERATE_ASPNET_CERTIFICATE: 0
+ ContinuousIntegrationBuild: true
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ strategy:
+ matrix:
+ configuration: [Debug, Release]
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v5
+
+ - name: Setup dotnet
+ uses: actions/setup-dotnet@v5
+ with:
+ dotnet-version: |
+ 8.x
+ 10.x
+
+ - name: Build benchmarks
+ id: build
+ run: dotnet build ./Tests/NetOffice.Benchmarks/NetOffice.Benchmarks.csproj -c '${{ matrix.configuration }}'
diff --git a/Tests/NetOffice.Benchmarks/CoreVariants.cs b/Tests/NetOffice.Benchmarks/CoreVariants.cs
new file mode 100644
index 000000000..cc868fefe
--- /dev/null
+++ b/Tests/NetOffice.Benchmarks/CoreVariants.cs
@@ -0,0 +1,192 @@
+using System;
+using System.Collections.Generic;
+using System.Collections.Concurrent;
+using System.Linq;
+using NetOffice;
+
+namespace NetOffice.Benchmarks
+{
+ /// <summary>
+ /// Base interface for Core collection variants to enable polymorphic benchmarking
+ /// </summary>
+ internal interface ICoreCollectionVariant
+ {
+ void AddObject(ICOMObject obj);
+ bool RemoveObject(ICOMObject obj);
+ void Clear();
+ int Count { get; }
+ }
+
+ /// <summary>
+ /// Variant 1: Current implementation using List (baseline)
+ /// Time: O(n) per removal
+ /// Memory: O(n)
+ /// </summary>
+ internal class ListCoreVariant : ICoreCollectionVariant
+ {
+ private readonly List<ICOMObject> _globalObjectList = new List<ICOMObject>();
+ private readonly object _lock = new object();
+
+ public void AddObject(ICOMObject obj)
+ {
+ lock (_lock)
+ {
+ _globalObjectList.Add(obj);
+ }
+ }
+
+ public bool RemoveObject(ICOMObject obj)
+ {
+ lock (_lock)
+ {
+ return _globalObjectList.Remove(obj);
+ }
+ }
+
+ public void Clear()
+ {
+ lock (_lock)
+ {
+ _globalObjectList.Clear();
+ }
+ }
+
+ public int Count
+ {
+ get
+ {
+ lock (_lock)
+ {
+ return _globalObjectList.Count;
+ }
+ }
+ }
+ }
+
+ /// <summary>
+ /// Variant 2: HashSet implementation (proposed solution)
+ /// Time: O(1) per removal (average)
+ /// Memory: O(n) with higher constant factor
+ /// Requires proper GetHashCode() and Equals() implementation
+ /// </summary>
+ internal class HashSetCoreVariant : ICoreCollectionVariant
+ {
+ private readonly HashSet<ICOMObject> _globalObjectList = new HashSet<ICOMObject>();
+ private readonly object _lock = new object();
+
+ public void AddObject(ICOMObject obj)
+ {
+ lock (_lock)
+ {
+ _globalObjectList.Add(obj);
+ }
+ }
+
+ public bool RemoveObject(ICOMObject obj)
+ {
+ lock (_lock)
+ {
+ return _globalObjectList.Remove(obj);
+ }
+ }
+
+ public void Clear()
+ {
+ lock (_lock)
+ {
+ _globalObjectList.Clear();
+ }
+ }
+
+ public int Count
+ {
+ get
+ {
+ lock (_lock)
+ {
+ return _globalObjectList.Count;
+ }
+ }
+ }
+ }
+
+ /// <summary>
+ /// Variant 3: Dictionary keyed by the object's hash code (alternative)
+ /// Time: O(1) per removal by key
+ /// Memory: O(n)
+ /// NOTE(review): hash codes are not guaranteed unique; a collision silently overwrites an existing entry
+ /// </summary>
+ internal class DictionaryCoreVariant : ICoreCollectionVariant
+ {
+ private readonly Dictionary<int, ICOMObject> _globalObjectList = new Dictionary<int, ICOMObject>();
+ private readonly object _lock = new object();
+
+ public void AddObject(ICOMObject obj)
+ {
+ lock (_lock)
+ {
+ int key = obj.GetHashCode();
+ _globalObjectList[key] = obj;
+ }
+ }
+
+ public bool RemoveObject(ICOMObject obj)
+ {
+ lock (_lock)
+ {
+ int key = obj.GetHashCode();
+ return _globalObjectList.Remove(key);
+ }
+ }
+
+ public void Clear()
+ {
+ lock (_lock)
+ {
+ _globalObjectList.Clear();
+ }
+ }
+
+ public int Count
+ {
+ get
+ {
+ lock (_lock)
+ {
+ return _globalObjectList.Count;
+ }
+ }
+ }
+ }
+
+ /// <summary>
+ /// Variant 4: ConcurrentDictionary (lock-free alternative)
+ /// Time: O(1) per removal (average)
+ /// Memory: O(n) with higher overhead
+ /// Benefit: Reduces lock contention with built-in thread-safety
+ /// </summary>
+ internal class ConcurrentDictionaryCoreVariant : ICoreCollectionVariant
+ {
+ private readonly ConcurrentDictionary<int, ICOMObject> _globalObjectList =
+ new ConcurrentDictionary<int, ICOMObject>();
+
+ public void AddObject(ICOMObject obj)
+ {
+ int key = obj.GetHashCode();
+ _globalObjectList[key] = obj;
+ }
+
+ public bool RemoveObject(ICOMObject obj)
+ {
+ int key = obj.GetHashCode();
+ return _globalObjectList.TryRemove(key, out _);
+ }
+
+ public void Clear()
+ {
+ _globalObjectList.Clear();
+ }
+
+ public int Count => _globalObjectList.Count;
+ }
+}
diff --git a/Tests/NetOffice.Benchmarks/MemoryBenchmark.cs b/Tests/NetOffice.Benchmarks/MemoryBenchmark.cs
new file mode 100644
index 000000000..769595075
--- /dev/null
+++ b/Tests/NetOffice.Benchmarks/MemoryBenchmark.cs
@@ -0,0 +1,254 @@
+using System;
+using System.Collections.Generic;
+using BenchmarkDotNet.Attributes;
+using BenchmarkDotNet.Jobs;
+using NetOffice;
+
+namespace NetOffice.Benchmarks
+{
+ ///
+ /// Scenario D: Memory Allocation Benchmark
+ /// Compares memory overhead and GC pressure between different collection implementations.
+ ///
+ [MemoryDiagnoser]
+ [SimpleJob(RuntimeMoniker.Net48)]
+ [SimpleJob(RuntimeMoniker.Net80)]
+ [SimpleJob(RuntimeMoniker.Net10_0)]
+ public class MemoryBenchmark
+ {
+ [Params(10, 100, 1000, 10000)]
+ public int ObjectCount;
+
+ ///
+ /// Memory footprint for List implementation
+ ///
+ [Benchmark(Baseline = true, Description = "List - Memory Footprint")]
+ public int MemoryFootprint_List()
+ {
+ var variant = new ListCoreVariant();
+ var objects = new List<ICOMObject>(ObjectCount);
+
+ // Create and add objects
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Perform some operations to measure steady-state memory
+ int count = variant.Count;
+
+ // Remove half the objects to measure memory behavior
+ for (int i = 0; i < ObjectCount / 2; i++)
+ {
+ variant.RemoveObject(objects[i]);
+ }
+
+ return variant.Count;
+ }
+
+ ///
+ /// Memory footprint for HashSet implementation
+ ///
+ [Benchmark(Description = "HashSet - Memory Footprint")]
+ public int MemoryFootprint_HashSet()
+ {
+ var variant = new HashSetCoreVariant();
+ var objects = new List<ICOMObject>(ObjectCount);
+
+ // Create and add objects
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Perform some operations to measure steady-state memory
+ int count = variant.Count;
+
+ // Remove half the objects to measure memory behavior
+ for (int i = 0; i < ObjectCount / 2; i++)
+ {
+ variant.RemoveObject(objects[i]);
+ }
+
+ return variant.Count;
+ }
+
+ ///
+ /// Memory footprint for Dictionary implementation
+ ///
+ [Benchmark(Description = "Dictionary - Memory Footprint")]
+ public int MemoryFootprint_Dictionary()
+ {
+ var variant = new DictionaryCoreVariant();
+ var objects = new List<ICOMObject>(ObjectCount);
+
+ // Create and add objects
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Perform some operations to measure steady-state memory
+ int count = variant.Count;
+
+ // Remove half the objects to measure memory behavior
+ for (int i = 0; i < ObjectCount / 2; i++)
+ {
+ variant.RemoveObject(objects[i]);
+ }
+
+ return variant.Count;
+ }
+
+ ///
+ /// Memory footprint for ConcurrentDictionary implementation
+ ///
+ [Benchmark(Description = "ConcurrentDictionary - Memory Footprint")]
+ public int MemoryFootprint_ConcurrentDictionary()
+ {
+ var variant = new ConcurrentDictionaryCoreVariant();
+ var objects = new List<ICOMObject>(ObjectCount);
+
+ // Create and add objects
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Perform some operations to measure steady-state memory
+ int count = variant.Count;
+
+ // Remove half the objects to measure memory behavior
+ for (int i = 0; i < ObjectCount / 2; i++)
+ {
+ variant.RemoveObject(objects[i]);
+ }
+
+ return variant.Count;
+ }
+
+ ///
+ /// Test allocation patterns - List
+ ///
+ [Benchmark(Description = "List - Allocation Pattern")]
+ public void AllocationPattern_List()
+ {
+ var variant = new ListCoreVariant();
+
+ // Simulate typical lifecycle: add, use, remove
+ for (int cycle = 0; cycle < 10; cycle++)
+ {
+ var objects = new List<ICOMObject>();
+
+ // Add phase
+ for (int i = 0; i < ObjectCount / 10; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Remove phase
+ foreach (var obj in objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+ }
+
+ ///
+ /// Test allocation patterns - HashSet
+ ///
+ [Benchmark(Description = "HashSet - Allocation Pattern")]
+ public void AllocationPattern_HashSet()
+ {
+ var variant = new HashSetCoreVariant();
+
+ // Simulate typical lifecycle: add, use, remove
+ for (int cycle = 0; cycle < 10; cycle++)
+ {
+ var objects = new List<ICOMObject>();
+
+ // Add phase
+ for (int i = 0; i < ObjectCount / 10; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Remove phase
+ foreach (var obj in objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+ }
+
+ ///
+ /// Test allocation patterns - Dictionary
+ ///
+ [Benchmark(Description = "Dictionary - Allocation Pattern")]
+ public void AllocationPattern_Dictionary()
+ {
+ var variant = new DictionaryCoreVariant();
+
+ // Simulate typical lifecycle: add, use, remove
+ for (int cycle = 0; cycle < 10; cycle++)
+ {
+ var objects = new List<ICOMObject>();
+
+ // Add phase
+ for (int i = 0; i < ObjectCount / 10; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Remove phase
+ foreach (var obj in objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+ }
+
+ ///
+ /// Test allocation patterns - ConcurrentDictionary
+ ///
+ [Benchmark(Description = "ConcurrentDictionary - Allocation Pattern")]
+ public void AllocationPattern_ConcurrentDictionary()
+ {
+ var variant = new ConcurrentDictionaryCoreVariant();
+
+ // Simulate typical lifecycle: add, use, remove
+ for (int cycle = 0; cycle < 10; cycle++)
+ {
+ var objects = new List<ICOMObject>();
+
+ // Add phase
+ for (int i = 0; i < ObjectCount / 10; i++)
+ {
+ var obj = new MockCOMObject();
+ objects.Add(obj);
+ variant.AddObject(obj);
+ }
+
+ // Remove phase
+ foreach (var obj in objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+ }
+ }
+}
diff --git a/Tests/NetOffice.Benchmarks/MockCOMObject.cs b/Tests/NetOffice.Benchmarks/MockCOMObject.cs
new file mode 100644
index 000000000..0e2a37fe9
--- /dev/null
+++ b/Tests/NetOffice.Benchmarks/MockCOMObject.cs
@@ -0,0 +1,135 @@
+using System;
+using System.Collections.Generic;
+using NetOffice;
+using NetOffice.Exceptions;
+
+namespace NetOffice.Benchmarks
+{
+ ///
+ /// Mock implementation of ICOMObject for benchmarking purposes.
+ /// This is a minimal implementation focusing on the properties needed for collection operations.
+ ///
+ internal class MockCOMObject : ICOMObject
+ {
+ private static int _instanceCounter = 0;
+ private readonly int _id;
+ private bool _isDisposed;
+
+ public MockCOMObject()
+ {
+ _id = System.Threading.Interlocked.Increment(ref _instanceCounter);
+ }
+
+ // ICOMObjectProxy implementation
+ public object UnderlyingObject => new object();
+ public Type UnderlyingType => typeof(object);
+ public string UnderlyingTypeName => "MockCOMObject";
+ public string UnderlyingFriendlyTypeName => "MockCOMObject";
+ public string UnderlyingComponentName => "NetOffice.Benchmarks";
+ public string InstanceName => $"MockCOMObject_{_id}";
+ public string InstanceFriendlyName => $"MockCOMObject_{_id}";
+ public string InstanceComponentName => "NetOffice.Benchmarks";
+ public Type InstanceType => typeof(MockCOMObject);
+
+ // ICOMObjectDisposable implementation
+ public event OnDisposeEventHandler? OnDispose;
+ public bool IsDisposed => _isDisposed;
+ public bool IsCurrentlyDisposing { get; private set; }
+
+ public void Dispose()
+ {
+ Dispose(true);
+ }
+
+ public void Dispose(bool disposeEventBinding)
+ {
+ if (_isDisposed || IsCurrentlyDisposing)
+ return;
+
+ IsCurrentlyDisposing = true;
+ try
+ {
+ // OnDisposeEventArgs constructor is internal, so we can't invoke the event properly
+ // For benchmarking purposes, we'll skip event invocation
+ _isDisposed = true;
+ }
+ finally
+ {
+ IsCurrentlyDisposing = false;
+ }
+ }
+
+ // ICOMObject implementation
+ public object SyncRoot => this;
+ public Core? Factory { get; set; }
+ public Invoker? Invoker { get; set; }
+ public Settings? Settings { get; set; }
+ public DebugConsole? Console { get; set; }
+
+ public T To<T>() where T : class, ICOMObject
+ {
+ throw new NotImplementedException("MockCOMObject does not support conversion");
+ }
+
+ public object Clone()
+ {
+ return new MockCOMObject();
+ }
+
+ // ICOMObjectTable implementation
+ public ICOMObject? ParentObject { get; set; }
+ public IEnumerable<ICOMObject> ChildObjects => Array.Empty<ICOMObject>();
+
+ public void AddChildObject(ICOMObject childObject)
+ {
+ // No-op for mock
+ }
+
+ public bool RemoveChildObject(ICOMObject childObject)
+ {
+ return false; // No-op for mock
+ }
+
+ // ICOMObjectTableDisposable implementation
+ public void DisposeChildInstances()
+ {
+ // No-op for mock
+ }
+
+ public void DisposeChildInstances(bool disposeEventBinding)
+ {
+ // No-op for mock
+ }
+
+ // ICOMObjectEvents implementation
+ public bool IsEventBinding => false;
+ public bool IsEventBridgeInitialized => false;
+ public bool IsWithEventRecipients => false;
+
+ // ICOMObjectAvailability implementation
+ public bool EntityIsAvailable(string name)
+ {
+ return true;
+ }
+
+ public bool EntityIsAvailable(string name, Availability.SupportedEntityType searchType)
+ {
+ return true;
+ }
+
+ // Override GetHashCode and Equals for use in HashSet
+ public override int GetHashCode()
+ {
+ return _id.GetHashCode();
+ }
+
+ public override bool Equals(object? obj)
+ {
+ if (obj is MockCOMObject other)
+ {
+ return _id == other._id;
+ }
+ return false;
+ }
+ }
+}
diff --git a/Tests/NetOffice.Benchmarks/NetOffice.Benchmarks.csproj b/Tests/NetOffice.Benchmarks/NetOffice.Benchmarks.csproj
new file mode 100644
index 000000000..046877622
--- /dev/null
+++ b/Tests/NetOffice.Benchmarks/NetOffice.Benchmarks.csproj
@@ -0,0 +1,20 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFrameworks>net48;net8.0;net10.0</TargetFrameworks>
+    <RootNamespace>NetOffice.Benchmarks</RootNamespace>
+    <Description>Performance benchmarks for NetOffice Core</Description>
+    <IsPackable>false</IsPackable>
+    <GenerateDocumentationFile>false</GenerateDocumentationFile>
+    <LangVersion>latest</LangVersion>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="BenchmarkDotNet" Version="0.14.0" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\..\Source\NetOffice\NetOffice.csproj" />
+  </ItemGroup>
+</Project>
diff --git a/Tests/NetOffice.Benchmarks/Program.cs b/Tests/NetOffice.Benchmarks/Program.cs
new file mode 100644
index 000000000..8e49ec774
--- /dev/null
+++ b/Tests/NetOffice.Benchmarks/Program.cs
@@ -0,0 +1,101 @@
+using System;
+using BenchmarkDotNet.Running;
+using BenchmarkDotNet.Configs;
+using BenchmarkDotNet.Exporters;
+using BenchmarkDotNet.Exporters.Csv;
+
+namespace NetOffice.Benchmarks
+{
+ ///
+ /// Entry point for NetOffice.Benchmarks
+ /// Benchmarks the performance of RemoveObjectFromList with different collection implementations
+ /// to address Issue #221: https://github.com/NetOfficeFw/NetOffice/issues/221
+ ///
+ class Program
+ {
+ static void Main(string[] args)
+ {
+ Console.WriteLine("=======================================================");
+ Console.WriteLine("NetOffice RemoveObjectFromList Performance Benchmarks");
+ Console.WriteLine("Issue #221: Core.RemoveObjectFromList Performance");
+ Console.WriteLine("=======================================================");
+ Console.WriteLine();
+ Console.WriteLine("This benchmark suite compares four collection implementations:");
+ Console.WriteLine(" 1. List - Current implementation (baseline)");
+ Console.WriteLine(" 2. HashSet - Proposed solution");
+ Console.WriteLine(" 3. Dictionary - Alternative with IntPtr key");
+ Console.WriteLine(" 4. ConcurrentDictionary - Lock-free alternative");
+ Console.WriteLine();
+ Console.WriteLine("Scenarios tested:");
+ Console.WriteLine(" A. Sequential Removal - Worst case: N objects removed one-by-one");
+ Console.WriteLine(" B. Bulk Disposal - Removing from end (current pattern)");
+ Console.WriteLine(" C. Mixed Operations - 70% adds, 30% removes interleaved");
+ Console.WriteLine(" D. Memory Allocation - Memory overhead and GC pressure");
+ Console.WriteLine();
+ Console.WriteLine("Object counts: 10, 100, 1,000, 10,000");
+ Console.WriteLine("Target frameworks: .NET Framework 4.8, .NET 8.0, .NET 10.0");
+ Console.WriteLine();
+
+ // Configure exporters for comprehensive results
+ var config = DefaultConfig.Instance
+ .AddExporter(MarkdownExporter.GitHub)
+ .AddExporter(HtmlExporter.Default)
+ .AddExporter(CsvExporter.Default);
+
+ if (args.Length > 0)
+ {
+ // Run specific benchmark class if provided
+ switch (args[0].ToLowerInvariant())
+ {
+ case "removal":
+ case "main":
+ Console.WriteLine("Running main removal benchmarks (Scenarios A, B, C)...");
+ BenchmarkRunner.Run<RemoveObjectListBenchmark>(config);
+ break;
+
+ case "memory":
+ case "mem":
+ Console.WriteLine("Running memory benchmarks (Scenario D)...");
+ BenchmarkRunner.Run<MemoryBenchmark>(config);
+ break;
+
+ case "all":
+ Console.WriteLine("Running all benchmarks...");
+ BenchmarkRunner.Run<RemoveObjectListBenchmark>(config);
+ BenchmarkRunner.Run<MemoryBenchmark>(config);
+ break;
+
+ default:
+ Console.WriteLine($"Unknown benchmark: {args[0]}");
+ ShowUsage();
+ return;
+ }
+ }
+ else
+ {
+ // Default: Run all benchmarks
+ Console.WriteLine("Running all benchmarks...");
+ Console.WriteLine();
+ BenchmarkRunner.Run<RemoveObjectListBenchmark>(config);
+ BenchmarkRunner.Run<MemoryBenchmark>(config);
+ }
+
+ Console.WriteLine();
+ Console.WriteLine("=======================================================");
+ Console.WriteLine("Benchmarks completed!");
+ Console.WriteLine("Results have been saved to BenchmarkDotNet.Artifacts/");
+ Console.WriteLine("=======================================================");
+ }
+
+ static void ShowUsage()
+ {
+ Console.WriteLine("Usage: NetOffice.Benchmarks [benchmark]");
+ Console.WriteLine();
+ Console.WriteLine("Benchmarks:");
+ Console.WriteLine(" removal, main - Run main removal benchmarks (Scenarios A, B, C)");
+ Console.WriteLine(" memory, mem - Run memory benchmarks (Scenario D)");
+ Console.WriteLine(" all - Run all benchmarks (default)");
+ Console.WriteLine();
+ }
+ }
+}
diff --git a/Tests/NetOffice.Benchmarks/README.md b/Tests/NetOffice.Benchmarks/README.md
new file mode 100644
index 000000000..a6cb2a4e6
--- /dev/null
+++ b/Tests/NetOffice.Benchmarks/README.md
@@ -0,0 +1,148 @@
+# NetOffice.Benchmarks
+
+Performance benchmarks for NetOffice Core, specifically addressing [Issue #221](https://github.com/NetOfficeFw/NetOffice/issues/221): `Core.RemoveObjectFromList` performance bottleneck.
+
+## Overview
+
+This benchmark project compares the performance of different collection implementations for the `_globalObjectList` in `Core.cs`. The current implementation uses `List<ICOMObject>`, which results in O(n²) complexity when disposing parent objects with multiple children.
+
+## Problem Statement
+
+The current `List` implementation causes performance issues because:
+- Each `Remove()` operation is O(n)
+- Disposing N children results in cumulative O(n²) time complexity
+- This becomes a bottleneck when working with large COM object hierarchies
+
+## Implementations Tested
+
+1. **`List<ICOMObject>`** (Baseline) - Current implementation
+ - Time: O(n) per removal
+ - Memory: O(n)
+ - Thread-safety: Manual locking
+
+2. **`HashSet<ICOMObject>`** (Proposed)
+ - Time: O(1) per removal (average)
+ - Memory: O(n) with higher constant factor
+ - Thread-safety: Manual locking
+ - Requires proper `GetHashCode()` and `Equals()` implementation
+
+3. **`Dictionary<int, ICOMObject>`** (Alternative)
+ - Time: O(1) per removal by key
+ - Memory: O(n)
+ - Thread-safety: Manual locking
+ - Uses object's hash code as key for guaranteed lookup speed
+
+4. **`ConcurrentDictionary<int, ICOMObject>`** (Lock-free)
+ - Time: O(1) per removal (average)
+ - Memory: O(n) with higher overhead
+ - Thread-safety: Built-in lock-free operations
+ - Reduces lock contention in multi-threaded scenarios
+
+## Benchmark Scenarios
+
+### Scenario A: Sequential Removal
+Simulates disposing a parent with N children (worst case for List).
+- **Test**: Add N objects, then remove them all one by one
+- **Expected**: List shows O(n²) complexity, others show O(n)
+
+### Scenario B: Bulk Disposal
+Tests `DisposeAllCOMProxies()` behavior with removal from end.
+- **Test**: Add N objects, remove from end in while loop
+- **Expected**: Better performance for List than sequential, similar for others
+
+### Scenario C: Mixed Operations
+Real-world usage pattern with interleaved operations.
+- **Test**: 70% adds, 30% removes with random order
+- **Expected**: Shows practical performance under lock contention
+
+### Scenario D: Memory Allocation
+Compares memory overhead and GC pressure.
+- **Test**: Memory footprint and allocation patterns
+- **Expected**: HashSet/Dictionary have higher base memory but better scaling
+
+## Test Parameters
+
+- **Object counts**: 10, 100, 1,000, 10,000
+- **Target frameworks**: .NET Framework 4.8, .NET 8.0, .NET 10.0
+- **Metrics**:
+ - Mean/Median execution time
+ - Memory allocations
+ - GC collections
+ - Operations per second
+
+## Running the Benchmarks
+
+### Prerequisites
+- .NET SDK 10.0 or later (for the .NET 10 target)
+- .NET Framework 4.8 Developer Pack (for .NET Framework targets)
+- Admin/elevated permissions may be required for accurate profiling
+
+### Commands
+
+Run all benchmarks:
+```bash
+cd Tests/NetOffice.Benchmarks
+dotnet run -c Release
+```
+
+Run specific benchmark suite:
+```bash
+# Main removal benchmarks (Scenarios A, B, C)
+dotnet run -c Release -- removal
+
+# Memory benchmarks (Scenario D)
+dotnet run -c Release -- memory
+
+# All benchmarks explicitly
+dotnet run -c Release -- all
+```
+
+### Important Notes
+
+1. **Always use Release configuration** for accurate benchmarks
+2. **Close unnecessary applications** to reduce noise
+3. **Run on AC power** (laptops) and disable power-saving features
+4. **Disable antivirus scanning** of the output directory if possible
+5. Benchmarks will take **10-30 minutes** to complete depending on hardware
+
+## Output
+
+Results are saved to `BenchmarkDotNet.Artifacts/results/`:
+- `*.html` - Interactive HTML reports
+- `*.md` - GitHub-flavored Markdown reports
+- `*.csv` - Raw data for further analysis
+- `*-report-github.md` - Summary report for GitHub issues
+
+## Files
+
+- **`Program.cs`** - Entry point and benchmark runner
+- **`RemoveObjectListBenchmark.cs`** - Main removal benchmarks (Scenarios A, B, C)
+- **`MemoryBenchmark.cs`** - Memory allocation benchmarks (Scenario D)
+- **`CoreVariants.cs`** - Different collection implementations
+- **`MockCOMObject.cs`** - Minimal ICOMObject implementation for testing
+
+## Expected Results
+
+Based on theoretical analysis:
+
+| Scenario | List (O(n²)) | HashSet/Dict (O(n)) | Improvement |
+|----------|-------------|---------------------|-------------|
+| 10 objects | ~1 ms | ~0.1 ms | 10x |
+| 100 objects | ~10 ms | ~1 ms | 10x |
+| 1,000 objects | ~1 s | ~10 ms | 100x |
+| 10,000 objects | ~100 s | ~100 ms | 1000x |
+
+## Next Steps
+
+1. Run benchmarks and analyze results
+2. Create detailed performance analysis report
+3. Update Issue #221 with findings
+4. Recommend implementation change based on data
+5. Consider implementing the chosen solution
+
+## References
+
+- Issue: https://github.com/NetOfficeFw/NetOffice/issues/221
+- Code: `Source/NetOffice/Core.cs:96, 1346-1367`
+- BenchmarkDotNet: https://benchmarkdotnet.org/
+- Plan: `.github/prompts/plan-NetOfficeBenchmark.md`
diff --git a/Tests/NetOffice.Benchmarks/RemoveObjectListBenchmark.cs b/Tests/NetOffice.Benchmarks/RemoveObjectListBenchmark.cs
new file mode 100644
index 000000000..ccfdee03f
--- /dev/null
+++ b/Tests/NetOffice.Benchmarks/RemoveObjectListBenchmark.cs
@@ -0,0 +1,340 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using BenchmarkDotNet.Attributes;
+using BenchmarkDotNet.Jobs;
+using NetOffice;
+
+namespace NetOffice.Benchmarks
+{
+ ///
+ /// Benchmarks for RemoveObjectFromList performance comparing different collection implementations.
+ /// Tests various scenarios: Sequential removal, Bulk disposal, Mixed operations, and Memory allocation.
+ ///
+ [MemoryDiagnoser]
+ [SimpleJob(RuntimeMoniker.Net48)]
+ [SimpleJob(RuntimeMoniker.Net80)]
+ [SimpleJob(RuntimeMoniker.Net10_0)]
+ public class RemoveObjectListBenchmark
+ {
+ [Params(10, 100, 1000, 10000)]
+ public int ObjectCount;
+
+ private List<ICOMObject>? _objects;
+
+ [GlobalSetup]
+ public void GlobalSetup()
+ {
+ // Pre-create objects to avoid allocation overhead in benchmarks
+ _objects = new List<ICOMObject>(ObjectCount);
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ _objects.Add(new MockCOMObject());
+ }
+ }
+
+ #region Scenario A: Sequential Removal (Worst case for List)
+
+ ///
+ /// Scenario A: Sequential Removal with List (Baseline - Current Implementation)
+ /// Simulates disposing a parent with N children.
+ /// Expected: O(n²) complexity - performance degrades quadratically
+ ///
+ [Benchmark(Baseline = true, Description = "List - Sequential Removal")]
+ public void SequentialRemoval_List()
+ {
+ var variant = new ListCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove all objects one by one (simulates child disposal)
+ foreach (var obj in _objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+
+ ///
+ /// Scenario A: Sequential Removal with HashSet
+ /// Expected: O(n) complexity - linear performance
+ ///
+ [Benchmark(Description = "HashSet - Sequential Removal")]
+ public void SequentialRemoval_HashSet()
+ {
+ var variant = new HashSetCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove all objects one by one
+ foreach (var obj in _objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+
+ ///
+ /// Scenario A: Sequential Removal with Dictionary
+ /// Expected: O(n) complexity - linear performance
+ ///
+ [Benchmark(Description = "Dictionary - Sequential Removal")]
+ public void SequentialRemoval_Dictionary()
+ {
+ var variant = new DictionaryCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove all objects one by one
+ foreach (var obj in _objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+
+ ///
+ /// Scenario A: Sequential Removal with ConcurrentDictionary
+ /// Expected: O(n) complexity - linear performance with reduced lock contention
+ ///
+ [Benchmark(Description = "ConcurrentDictionary - Sequential Removal")]
+ public void SequentialRemoval_ConcurrentDictionary()
+ {
+ var variant = new ConcurrentDictionaryCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove all objects one by one
+ foreach (var obj in _objects)
+ {
+ variant.RemoveObject(obj);
+ }
+ }
+
+ #endregion
+
+ #region Scenario B: Bulk Disposal
+
+ ///
+ /// Scenario B: Bulk Disposal with List
+ /// Tests DisposeAllCOMProxies() pattern - removing from end in while loop
+ ///
+ [Benchmark(Description = "List - Bulk Disposal")]
+ public void BulkDisposal_List()
+ {
+ var variant = new ListCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove from end (better for List than from beginning)
+ for (int i = _objects.Count - 1; i >= 0; i--)
+ {
+ variant.RemoveObject(_objects[i]);
+ }
+ }
+
+ ///
+ /// Scenario B: Bulk Disposal with HashSet
+ ///
+ [Benchmark(Description = "HashSet - Bulk Disposal")]
+ public void BulkDisposal_HashSet()
+ {
+ var variant = new HashSetCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove from end
+ for (int i = _objects.Count - 1; i >= 0; i--)
+ {
+ variant.RemoveObject(_objects[i]);
+ }
+ }
+
+ ///
+ /// Scenario B: Bulk Disposal with Dictionary
+ ///
+ [Benchmark(Description = "Dictionary - Bulk Disposal")]
+ public void BulkDisposal_Dictionary()
+ {
+ var variant = new DictionaryCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove from end
+ for (int i = _objects.Count - 1; i >= 0; i--)
+ {
+ variant.RemoveObject(_objects[i]);
+ }
+ }
+
+ ///
+ /// Scenario B: Bulk Disposal with ConcurrentDictionary
+ ///
+ [Benchmark(Description = "ConcurrentDictionary - Bulk Disposal")]
+ public void BulkDisposal_ConcurrentDictionary()
+ {
+ var variant = new ConcurrentDictionaryCoreVariant();
+
+ // Add all objects
+ foreach (var obj in _objects!)
+ {
+ variant.AddObject(obj);
+ }
+
+ // Remove from end
+ for (int i = _objects.Count - 1; i >= 0; i--)
+ {
+ variant.RemoveObject(_objects[i]);
+ }
+ }
+
+ #endregion
+
+ #region Scenario C: Mixed Operations
+
+ ///
+ /// Scenario C: Mixed Operations with List
+ /// Real-world usage: 70% adds, 30% removes interleaved
+ ///
+ [Benchmark(Description = "List - Mixed Operations")]
+ public void MixedOperations_List()
+ {
+ var variant = new ListCoreVariant();
+ var random = new Random(42); // Fixed seed for reproducibility
+ var addedObjects = new List<ICOMObject>();
+
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ if (random.NextDouble() < 0.7 || addedObjects.Count == 0)
+ {
+ // Add operation (70% of time)
+ var obj = _objects![i % _objects.Count];
+ variant.AddObject(obj);
+ addedObjects.Add(obj);
+ }
+ else
+ {
+ // Remove operation (30% of time)
+ int removeIndex = random.Next(addedObjects.Count);
+ var objToRemove = addedObjects[removeIndex];
+ variant.RemoveObject(objToRemove);
+ addedObjects.RemoveAt(removeIndex);
+ }
+ }
+ }
+
+ ///
+ /// Scenario C: Mixed Operations with HashSet
+ ///
+ [Benchmark(Description = "HashSet - Mixed Operations")]
+ public void MixedOperations_HashSet()
+ {
+ var variant = new HashSetCoreVariant();
+ var random = new Random(42);
+ var addedObjects = new List<ICOMObject>();
+
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ if (random.NextDouble() < 0.7 || addedObjects.Count == 0)
+ {
+ var obj = _objects![i % _objects.Count];
+ variant.AddObject(obj);
+ if (!addedObjects.Contains(obj))
+ addedObjects.Add(obj);
+ }
+ else
+ {
+ int removeIndex = random.Next(addedObjects.Count);
+ var objToRemove = addedObjects[removeIndex];
+ variant.RemoveObject(objToRemove);
+ addedObjects.RemoveAt(removeIndex);
+ }
+ }
+ }
+
+ ///
+ /// Scenario C: Mixed Operations with Dictionary
+ ///
+ [Benchmark(Description = "Dictionary - Mixed Operations")]
+ public void MixedOperations_Dictionary()
+ {
+ var variant = new DictionaryCoreVariant();
+ var random = new Random(42);
+ var addedObjects = new List<ICOMObject>();
+
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ if (random.NextDouble() < 0.7 || addedObjects.Count == 0)
+ {
+ var obj = _objects![i % _objects.Count];
+ variant.AddObject(obj);
+ if (!addedObjects.Contains(obj))
+ addedObjects.Add(obj);
+ }
+ else
+ {
+ int removeIndex = random.Next(addedObjects.Count);
+ var objToRemove = addedObjects[removeIndex];
+ variant.RemoveObject(objToRemove);
+ addedObjects.RemoveAt(removeIndex);
+ }
+ }
+ }
+
+ ///
+ /// Scenario C: Mixed Operations with ConcurrentDictionary
+ ///
+ [Benchmark(Description = "ConcurrentDictionary - Mixed Operations")]
+ public void MixedOperations_ConcurrentDictionary()
+ {
+ var variant = new ConcurrentDictionaryCoreVariant();
+ var random = new Random(42);
+ var addedObjects = new List<ICOMObject>();
+
+ for (int i = 0; i < ObjectCount; i++)
+ {
+ if (random.NextDouble() < 0.7 || addedObjects.Count == 0)
+ {
+ var obj = _objects![i % _objects.Count];
+ variant.AddObject(obj);
+ if (!addedObjects.Contains(obj))
+ addedObjects.Add(obj);
+ }
+ else
+ {
+ int removeIndex = random.Next(addedObjects.Count);
+ var objToRemove = addedObjects[removeIndex];
+ variant.RemoveObject(objToRemove);
+ addedObjects.RemoveAt(removeIndex);
+ }
+ }
+ }
+
+ #endregion
+ }
+}