diff --git a/.github/CLAUDE.md b/.github/CLAUDE.md
new file mode 100644
index 00000000..da1bb1a1
--- /dev/null
+++ b/.github/CLAUDE.md
@@ -0,0 +1,230 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Project Overview
+
+LogExpert is a Windows log file viewer and analyzer built with C# and Windows Forms. It's a GUI replacement for the Unix `tail` command with extensive features including tail mode, filtering, bookmarks, highlighting, and a plugin-based architecture for parsing custom log formats.
+
+**Key Technologies:**
+- .NET 10.0 (Windows target framework)
+- Windows Forms for UI
+- Nuke Build System with MSBuild
+- NUnit for testing
+- Plugin-based architecture
+
+## Build Commands
+
+### Using Nuke Build (Recommended)
+
+```powershell
+# Build the solution
+./build.ps1
+
+# Clean and build
+./build.ps1 --target Clean Compile
+
+# Run tests
+./build.ps1 --target Test
+
+# Full release build with packages
+./build.ps1 --target Clean Pack CreateSetup --configuration Release
+```
+
+### Using .NET CLI Directly
+
+```bash
+# From src/ directory
+dotnet restore
+dotnet build --no-restore
+dotnet test --no-build --verbosity normal
+
+# Run specific test project
+dotnet test src/LogExpert.Tests/LogExpert.Tests.csproj
+```
+
+### Important Build Notes
+
+- **Windows-only**: This project requires Windows and .NET 10.0.100 SDK (specified in [global.json](global.json))
+- **Cannot build on Linux/macOS**: Uses Windows Desktop SDK and Windows Forms
+- Nuke build automatically downloads the correct .NET SDK version if needed
+- Output directory: `bin/(Debug|Release)/`
+
+## Architecture
+
+### High-Level Structure
+
+The codebase follows a modular architecture with clear separation of concerns:
+
+```
+LogExpert/
+├── LogExpert/ # Main application entry point and UI orchestration
+├── LogExpert.Core/ # Core business logic, log reading, filtering
+├── LogExpert.UI/ # Windows Forms UI components and dialogs
+├── LogExpert.Resources/ # Localization resources
+├── LogExpert.Configuration/# Configuration management
+├── ColumnizerLib/ # Plugin interface definitions
+├── PluginRegistry/ # Plugin discovery and security
+└── Columnizers/ # Built-in columnizer plugins (CSV, JSON, Regex, etc.)
+```
+
+### Key Architectural Components
+
+#### 1. Single Instance Mode with IPC
+- Application uses a Mutex to ensure single instance per session
+- Named pipes (`LogExpertInstanceMutex{sessionId}`) for inter-process communication
+- Secondary instances send file paths to primary instance via JSON over named pipes
+- See [Program.cs](src/LogExpert/Program.cs) for implementation
+
+#### 2. Plugin System
+- **Columnizers** (`ILogLineColumnizer`): Parse log lines into columns
+- **File System Plugins** (`IFileSystemPlugin`): Support non-local file sources (e.g., SFTP)
+- **Context Menu Plugins** (`IContextMenuEntry`): Add custom menu items
+- **Keyword Actions** (`IKeywordAction`): React to keywords in logs
+- Plugin discovery happens at startup via `PluginRegistry`
+- Plugins are loaded from `plugins/` and `pluginsx86/` directories
+- Security: Plugin hashes are verified against generated hashes (Release builds only)
+- See [PLUGIN_DEVELOPMENT_GUIDE.md](src/docs/PLUGIN_DEVELOPMENT_GUIDE.md) for details
+
+#### 3. Log File Reading
+- Abstract base class: `PositionAwareStreamReaderBase`
+- Implementations: `PositionAwareStreamReaderSystem`, `PositionAwareStreamReaderLegacy`
+- Uses buffered streams for efficient reading of large files
+- Supports encoding detection (UTF-8, UTF-16, UTF-32 with BOM)
+- Position tracking for tail mode and seeking
+- See [src/LogExpert.Core/Classes/Log/](src/LogExpert.Core/Classes/Log/) for implementations
+
+#### 4. Configuration Management
+- Centralized via `ConfigManager.Instance`
+- Initialized with application startup path and screen information
+- Supports import/export of settings
+- Persists user preferences, columnizer history, highlight masks, etc.
+- Configuration stored in application startup directory (portable mode)
+
+#### 5. Windows Forms UI Architecture
+- MDI interface with tab support via `AbstractLogTabWindow`
+- Main window created in [LogTabWindow.cs](src/LogExpert.UI/Dialogs/LogTabWindow/)
+- Custom controls: `BufferedDataGridView`, `LogTabControl`, `DateTimeDragControl`
+- High DPI considerations: Avoid `AutoScaleMode` and `AutoScaleDimensions` on individual controls
+- Dark mode support via `Application.SetColorMode()`
+
+### Critical Files and Their Purposes
+
+- [Program.cs](src/LogExpert/Program.cs) - Application entry point, IPC setup, single instance handling
+- [AbstractLogTabWindow.cs](src/LogExpert.UI/Extensions/LogWindow/AbstractLogTabWindow.cs) - Main window factory and orchestration
+- [ILogLineColumnizer.cs](src/ColumnizerLib/ILogLineColumnizer.cs) - Core plugin interface for columnizers
+- [ColumnizerPicker.cs](src/LogExpert.Core/Classes/Columnizer/ColumnizerPicker.cs) - Automatic columnizer detection
+- [PluginRegistry.cs](src/PluginRegistry/) - Plugin discovery and security verification
+- [ConfigManager.cs](src/LogExpert.Configuration/) - Configuration persistence and management
+- [LogBuffer.cs](src/LogExpert.Core/Classes/Log/LogBuffer.cs) - In-memory log line caching
+
+## Development Workflow
+
+### Adding a New Columnizer Plugin
+
+1. Create new project in `src/` following naming pattern `*Columnizer`
+2. Add project reference to `ColumnizerLib`
+3. Implement `ILogLineColumnizer` interface
+4. Add project to `src/LogExpert.sln`
+5. Create corresponding test project in `Tests/` folder
+6. Plugin will be auto-discovered at runtime from output directory
+
+### Modifying Core Log Reading Logic
+
+- Core reading classes are in [src/LogExpert.Core/Classes/Log/](src/LogExpert.Core/Classes/Log/)
+- Inherit from `PositionAwareStreamReaderBase` for custom stream readers
+- Key methods to implement: `ReadLine()`, `Position` property, `Seek()`
+- Always maintain position tracking for tail mode support
+
+### Working with Windows Forms UI
+
+- UI components in `LogExpert.UI` project
+- Follow existing High DPI patterns (no AutoScale on controls)
+- Test with both light and dark mode (see `SetDarkMode()` in Program.cs)
+- Use localization resources from `LogExpert.Resources` project
+- Windows Forms designer files: `*.designer.cs`
+
+### Testing
+
+- Unit tests use NUnit framework with Moq for mocking
+- Test projects follow naming pattern `*.Tests`
+- Test data stored in `TestData/` directories within test projects
+- Run all tests: `./build.ps1 --target Test`
+- Run specific test: `dotnet test src/LogExpert.Tests/LogExpert.Tests.csproj`
+
+## Important Patterns and Conventions
+
+### Code Style
+- Nullable reference types enabled (`enable`)
+- Comprehensive `.editorconfig` with 4000+ rules
+- ImplicitUsings enabled
+- Assembly signing enabled (Key.snk)
+
+### Configuration Files
+- **Directory.Build.props** - Common MSBuild properties for all projects
+- **Directory.Packages.props** - Centralized NuGet package version management
+- **global.json** - .NET SDK version pinning (10.0.100)
+- **.editorconfig** - Code style and analysis rules
+
+### Project Organization
+- Solution folders: "Columnizers", "Tests", "docs", "setup"
+- Test projects nested under "Tests" solution folder
+- Columnizer projects nested under "Columnizers" solution folder
+- Documentation in `src/docs/` included in solution
+
+### Git Workflow
+- Default branch: `Development` (use for PRs)
+- Commit format: Include "Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>"
+- GitHub Actions run on push to Development branch
+- AppVeyor for CI builds and artifact creation
+
+## Plugin Security System
+
+**Release builds only:**
+- After compilation, `PluginHashGenerator.Tool` generates SHA256 hashes of all plugins
+- Hashes stored in [PluginHashGenerator.Generated.cs](src/PluginRegistry/PluginHashGenerator.Generated.cs)
+- At runtime, `PluginRegistry` verifies plugin hashes before loading
+- Users can trust new plugins via UI dialog
+- Hash updates automated via GitHub Actions on successful builds
+- See [PLUGIN_HASH_MANAGEMENT.md](src/docs/PLUGIN_HASH_MANAGEMENT.md)
+
+## Common Gotchas
+
+1. **Cross-platform builds fail**: This is Windows-only. Don't attempt Linux/macOS builds.
+2. **SDK version mismatch**: Must use .NET 10.0.100 SDK (specified in global.json)
+3. **Plugin not loading**: Check output directory - plugins must be in `plugins/` or `pluginsx86/`
+4. **High DPI issues**: Never use AutoScaleMode on individual controls, only on forms
+5. **IPC failures**: Named pipes require proper Windows permissions and session isolation
+6. **Encoding detection**: BOM-less files default to encoding from EncodingOptions
+7. **Plugin hashes**: Only verified in Release builds; Debug builds skip verification
+
+## Key Dependencies
+
+- **NLog**: Logging framework
+- **Newtonsoft.Json**: JSON serialization (legacy, but widely used)
+- **CsvHelper**: CSV parsing in CsvColumnizer
+- **SSH.NET**: SFTP support in SftpFileSystem plugins
+- **DockPanelSuite**: Docking panel UI controls
+- **Moq/NUnit**: Testing frameworks
+
+## References
+
+- Main README: [README.md](README.md)
+- Plugin Development: [PLUGIN_DEVELOPMENT_GUIDE.md](src/docs/PLUGIN_DEVELOPMENT_GUIDE.md)
+- Plugin Hash System: [PLUGIN_HASH_MANAGEMENT.md](src/docs/PLUGIN_HASH_MANAGEMENT.md)
+- Performance Benchmarks: [BENCHMARK_SUMMARY.md](src/docs/performance/BENCHMARK_SUMMARY.md)
+- GitHub Wiki: https://github.com/LogExperts/LogExpert/wiki
+- Discord: https://discord.gg/SjxkuckRe9
+
+# Update Rules File
+To update this file, ensure that all sections are kept current with the latest architectural decisions, build processes, and development workflows. Follow these guidelines:
+- Review and update build commands if there are changes in the build system.
+- Reflect any architectural changes in the "Architecture" section.
+- Keep development workflow steps accurate for new contributors.
+- Regularly verify links to other documentation files.
+- Maintain clarity and conciseness for ease of understanding by new developers.
+- Use consistent formatting throughout the document.
+- Add new sections as needed for significant changes in the project structure or processes.
+- Ensure all technical terms are explained or linked to relevant documentation.
+- Periodically review for outdated information and remove or update as necessary.
+- If told to not do something, ensure this is also added to the "Don't Do That" section.
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 28329487..feaa20e3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,8 +33,8 @@ x86/
bld/
[Bb]in/
[Oo]bj/
-[Ll]og/
-[Ll]ogs/
+/[Ll]og/
+/[Ll]ogs/
# Visual Studio 2015/2017 cache/options directory
.vs/
@@ -618,3 +618,102 @@ temp.txt
/Data
/.tmp
/.nuke/temp/*.log
+
+# root's docs folder should be empty
+/docs/*
+
+# test project logs should be ignored, this is for rolling logfile tests
+/src/tools/LogRotator/logs/*
+
+######## START JET BRAINS git ignore https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+/src/.idea/*
+
+# Covers JetBrains IDEs: IntelliJ, GoLand, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# AWS User-specific
+.idea/**/aws.xml
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# SonarLint plugin
+.idea/sonarlint/
+# see https://community.sonarsource.com/t/is-the-file-idea-idea-idea-sonarlint-xml-intended-to-be-under-source-control/121119
+.idea/sonarlint.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based HTTP Client
+.idea/httpRequests
+http-client.private.env.json
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+# Apifox Helper cache
+.idea/.cache/.Apifox_Helper
+.idea/ApifoxUploaderProjectSetting.xml
+
+# Github Copilot persisted session migrations, see: https://github.com/microsoft/copilot-intellij-feedback/issues/712#issuecomment-3322062215
+.idea/**/copilot.data.migration.*.xml
+
+######## END JET BRAINS git ignore https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..5edcd1f3
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,19 @@
+{
+ "chat.tools.terminal.autoApprove": {
+ "dotnet build": true,
+ "dotnet test": true,
+ "dotnet clean": true,
+ "Out-Null": true,
+ "ForEach-Object": true,
+ "Set-Content": true,
+ "Start-Process": true,
+ "Test-Path": true,
+ "type": true,
+ "New-Item": true,
+ "/^git stash; dotnet test src/LogExpert\\.Tests/LogExpert\\.Tests\\.csproj --filter \"TestShiftBuffers1\" --no-restore 2>&1 \\| Select-Object -Last 30$/": {
+ "approve": true,
+ "matchCommandLine": true
+ },
+ "git rev-parse": true
+ }
+}
\ No newline at end of file
diff --git a/src/AutoColumnizer/AutoColumnizer.cs b/src/AutoColumnizer/AutoColumnizer.cs
index 4bb349d9..d9670ca8 100644
--- a/src/AutoColumnizer/AutoColumnizer.cs
+++ b/src/AutoColumnizer/AutoColumnizer.cs
@@ -62,5 +62,9 @@ public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, st
{
}
+    public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue)
+ {
+ }
+
#endregion ILogLineColumnizer implementation
}
\ No newline at end of file
diff --git a/src/ColumnizerLib/ILogLineMemoryColumnizer.cs b/src/ColumnizerLib/ILogLineMemoryColumnizer.cs
index 05985fb5..fe0db173 100644
--- a/src/ColumnizerLib/ILogLineMemoryColumnizer.cs
+++ b/src/ColumnizerLib/ILogLineMemoryColumnizer.cs
@@ -94,5 +94,17 @@ public interface ILogLineMemoryColumnizer
/// The previous value that was associated with the specified column before the update.
void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, string oldValue);
+    /// <summary>
+    /// Notifies the columnizer that a column value has been updated.
+    /// </summary>
+    /// <param name="callback">
+    /// The callback interface that receives the value update notification. Cannot be null.
+    /// </param>
+    /// <param name="column">The zero-based index of the column for which the value is being updated.</param>
+    /// <param name="value">The new value to be associated with the specified column.</param>
+    /// <param name="oldValue">
+    /// The previous value associated with the specified
+    /// column before the update.
+    /// </param>
+    void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue);
+
#endregion
}
\ No newline at end of file
diff --git a/src/CsvColumnizer/CsvColumnizer.cs b/src/CsvColumnizer/CsvColumnizer.cs
index 4e3639e8..2f2d4649 100644
--- a/src/CsvColumnizer/CsvColumnizer.cs
+++ b/src/CsvColumnizer/CsvColumnizer.cs
@@ -164,6 +164,11 @@ public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, st
throw new NotImplementedException();
}
+    public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue)
+ {
+ throw new NotImplementedException();
+ }
+
public void Selected (ILogLineMemoryColumnizerCallback callback)
{
ArgumentNullException.ThrowIfNull(callback, nameof(callback));
diff --git a/src/GlassfishColumnizer/GlassfishColumnizer.cs b/src/GlassfishColumnizer/GlassfishColumnizer.cs
index 46df4e83..f4c213aa 100644
--- a/src/GlassfishColumnizer/GlassfishColumnizer.cs
+++ b/src/GlassfishColumnizer/GlassfishColumnizer.cs
@@ -71,12 +71,18 @@ public string[] GetColumnNames ()
///
/// Creates a new log line instance with text formatted for clipboard copying.
///
- /// The returned log line replaces separator characters in the original line with the '|'
- /// character to ensure compatibility with clipboard operations.
+ ///
+ /// The returned log line replaces separator characters in the original line with the '|' character to ensure
+ /// compatibility with clipboard operations.
+ ///
/// The log line to be formatted for clipboard use. Cannot be null.
- /// A callback interface for columnizer operations. This parameter is reserved for future use and is not utilized in
- /// this method.
- /// A new instance containing the clipboard-formatted text of the specified log line.
+ ///
+ /// A callback interface for columnizer operations. This parameter is reserved for future use and is not utilized in
+ /// this method.
+ ///
+ ///
+ /// A new instance containing the clipboard-formatted text of the specified log line.
+ ///
public ILogLineMemory GetLineTextForClipboard (ILogLineMemory logLine, ILogLineMemoryColumnizerCallback callback)
{
return new GlassFishLogLine(ReplaceInMemory(logLine.FullLine, SEPARATOR_CHAR, '|'), logLine.Text, logLine.LineNumber);
@@ -85,13 +91,19 @@ public ILogLineMemory GetLineTextForClipboard (ILogLineMemory logLine, ILogLineM
///
/// Parses a log line into its constituent columns according to the columnizer's format.
///
- /// If the input line does not conform to the expected format or is too short, only the log
- /// message column is populated and date/time columns are left blank. The method is tolerant of malformed input and
- /// will not throw for common formatting issues.
- /// A callback interface used to provide context or services required during columnization.
+ ///
+ /// If the input line does not conform to the expected format or is too short, only the log message column is
+ /// populated and date/time columns are left blank. The method is tolerant of malformed input and will not throw for
+ /// common formatting issues.
+ ///
+ ///
+ /// A callback interface used to provide context or services required during columnization.
+ ///
/// The log line to be split into columns.
- /// An object representing the columnized log line, with each column populated based on the input line. If the line
- /// does not match the expected format, the entire line is placed in the log message column.
+ ///
+ /// An object representing the columnized log line, with each column populated based on the input line. If the line
+ /// does not match the expected format, the entire line is placed in the log message column.
+ ///
[System.Diagnostics.CodeAnalysis.SuppressMessage("Globalization", "CA1303:Do not pass literals as localized parameters", Justification = "Intentionally passed")]
public IColumnizedLogLineMemory SplitLine (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
@@ -173,8 +185,9 @@ FormatException or
///
/// The memory to split
/// The separator character
- /// Array with 2 elements: [before separator, after separator].
- /// If separator not found, returns [input, Empty]
+ ///
+ /// Array with 2 elements: [before separator, after separator]. If separator not found, returns [input, Empty]
+ ///
     private static ReadOnlyMemory<char>[] SplitIntoTwo (ReadOnlyMemory<char> input, char separator)
{
var span = input.Span;
@@ -212,15 +225,23 @@ public int GetTimeOffset ()
///
/// Extracts the timestamp from the specified log line using the expected GlassFish log format.
///
- /// The method expects the log line to contain a timestamp in a specific format, typically used
- /// by GlassFish logs. If the log line does not match the expected format or the timestamp cannot be parsed, the
- /// method returns DateTime.MinValue.
- /// A callback interface for columnizer operations. This parameter is not used by this method but is required by the
- /// interface.
- /// The log line from which to extract the timestamp. Must not be null and should contain a timestamp in the
- /// expected format.
- /// A DateTime value representing the parsed timestamp from the log line. Returns DateTime.MinValue if the timestamp
- /// cannot be extracted or parsed.
+ ///
+ /// The method expects the log line to contain a timestamp in a specific format, typically used by GlassFish logs.
+ /// If the log line does not match the expected format or the timestamp cannot be parsed, the method returns
+ /// DateTime.MinValue.
+ ///
+ ///
+ /// A callback interface for columnizer operations. This parameter is not used by this method but is required by the
+ /// interface.
+ ///
+ ///
+ /// The log line from which to extract the timestamp. Must not be null and should contain a timestamp in the
+ /// expected format.
+ ///
+ ///
+ /// A DateTime value representing the parsed timestamp from the log line. Returns DateTime.MinValue if the timestamp
+ /// cannot be extracted or parsed.
+ ///
public DateTime GetTimestamp (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
var temp = logLine.FullLine;
@@ -268,23 +289,59 @@ public DateTime GetTimestamp (ILogLineMemoryColumnizerCallback callback, ILogLin
/// Updates the internal time offset based on the difference between the specified new and old values when the
/// column index is zero.
///
- /// If the column index is not zero, this method performs no action. For column 0, both value and
- /// oldValue must be valid date and time strings in the required format; otherwise, the time offset is not
- /// updated.
- /// The callback interface for columnizer operations. This parameter is not used in this method but may be required
- /// for interface compatibility.
- /// The zero-based index of the column to update. Only a value of 0 triggers a time offset update.
- /// The new value to apply. For column 0, this should be a date and time string in the expected format.
- /// The previous value to compare against. For column 0, this should be a date and time string in the expected
- /// format.
+ ///
+ /// If the column index is not zero, this method performs no action. For column 0, both value and oldValue must be
+ /// valid date and time strings in the required format; otherwise, the time offset is not updated.
+ ///
+ ///
+ /// The callback interface for columnizer operations. This parameter is not used in this method but may be required
+ /// for interface compatibility.
+ ///
+ ///
+ /// The zero-based index of the column to update. Only a value of 0 triggers a time offset update.
+ ///
+ ///
+ /// The new value to apply. For column 0, this should be a date and time string in the expected format.
+ ///
+ ///
+ /// The previous value to compare against. For column 0, this should be a date and time string in the expected
+ /// format.
+ ///
public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, string oldValue)
+ {
+ PushValue(callback, column, value, oldValue.AsMemory());
+ }
+
+ ///
+ /// Updates the internal time offset based on the difference between the specified new and old values when the
+ /// column index is zero.
+ ///
+ ///
+ /// If the column index is not zero, this method performs no action. For column 0, both value and oldValue must be
+ /// valid date and time strings in the required format; otherwise, the time offset is not updated.
+ ///
+ ///
+ /// The callback interface for columnizer operations. This parameter is not used in this method but may be required
+ /// for interface compatibility.
+ ///
+ ///
+ /// The zero-based index of the column to update. Only a value of 0 triggers a time offset update.
+ ///
+ ///
+ /// The new value to apply. For column 0, this should be a date and time string in the expected format.
+ ///
+ ///
+ /// The previous value to compare against. For column 0, this should be a date and time string in the expected
+ /// format.
+ ///
+    public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue)
{
if (column == 0)
{
try
{
var newDateTime = DateTime.ParseExact(value, DATETIME_FORMAT_OUT, _cultureInfo);
- var oldDateTime = DateTime.ParseExact(oldValue, DATETIME_FORMAT_OUT, _cultureInfo);
+ var oldDateTime = DateTime.ParseExact(oldValue.ToString(), DATETIME_FORMAT_OUT, _cultureInfo);
var mSecsOld = oldDateTime.Ticks / TimeSpan.TicksPerMillisecond;
var mSecsNew = newDateTime.Ticks / TimeSpan.TicksPerMillisecond;
_timeOffset = (int)(mSecsNew - mSecsOld);
diff --git a/src/JsonColumnizer/JsonColumnizer.cs b/src/JsonColumnizer/JsonColumnizer.cs
index 7eda8ccb..26af0ae4 100644
--- a/src/JsonColumnizer/JsonColumnizer.cs
+++ b/src/JsonColumnizer/JsonColumnizer.cs
@@ -166,6 +166,11 @@ public virtual void PushValue (ILogLineMemoryColumnizerCallback callback, int co
throw new NotImplementedException();
}
+    public virtual void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue)
+ {
+ throw new NotImplementedException();
+ }
+
public virtual void Selected (ILogLineMemoryColumnizerCallback callback)
{
ArgumentNullException.ThrowIfNull(callback, nameof(callback));
diff --git a/src/Log4jXmlColumnizer/Log4jXmlColumnizer.cs b/src/Log4jXmlColumnizer/Log4jXmlColumnizer.cs
index ecec912b..ffd8f5ce 100644
--- a/src/Log4jXmlColumnizer/Log4jXmlColumnizer.cs
+++ b/src/Log4jXmlColumnizer/Log4jXmlColumnizer.cs
@@ -213,13 +213,18 @@ public DateTime GetTimestamp (ILogLineMemoryColumnizerCallback callback, ILogLin
}
public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, string oldValue)
+ {
+ PushValue(callback, column, value, oldValue.AsMemory());
+ }
+
+    public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue)
{
if (column == 0)
{
try
{
var newDateTime = DateTime.ParseExact(value, DATETIME_FORMAT, _cultureInfo);
- var oldDateTime = DateTime.ParseExact(oldValue, DATETIME_FORMAT, _cultureInfo);
+ var oldDateTime = DateTime.ParseExact(oldValue.ToString(), DATETIME_FORMAT, _cultureInfo);
var mSecsOld = oldDateTime.Ticks / TimeSpan.TicksPerMillisecond;
var mSecsNew = newDateTime.Ticks / TimeSpan.TicksPerMillisecond;
_timeOffset = (int)(mSecsNew - mSecsOld);
diff --git a/src/LogExpert.Benchmarks/BufferIndexBenchmarks.cs b/src/LogExpert.Benchmarks/BufferIndexBenchmarks.cs
new file mode 100644
index 00000000..1b808d3e
--- /dev/null
+++ b/src/LogExpert.Benchmarks/BufferIndexBenchmarks.cs
@@ -0,0 +1,191 @@
+using BenchmarkDotNet.Attributes;
+
+using ColumnizerLib;
+
+using LogExpert.Benchmarks.Support;
+using LogExpert.Core.Classes.Log.Buffers;
+
+namespace LogExpert.Benchmarks;
+
+[MemoryDiagnoser]
+[RankColumn]
+public class BufferIndexBenchmarks : IDisposable
+{
+ private BufferIndex _index = null!;
+ private int _totalLines;
+
+ private bool _disposed;
+
+ [Params(100, 1_000, 10_000)]
+ public int BufferCount { get; set; }
+
+ private const int LINES_PER_BUFFER = 500;
+
+ [GlobalSetup]
+ public void Setup ()
+ {
+ _index = new BufferIndex(BufferCount, LINES_PER_BUFFER);
+ _totalLines = BufferCount * LINES_PER_BUFFER;
+
+ var fakeFileInfo = new FakeLogFileInfo();
+
+ using (var writeLock = _index.AcquireWriteLock())
+ {
+ for (int i = 0; i < BufferCount; i++)
+ {
+ var buffer = new LogBuffer(fakeFileInfo, LINES_PER_BUFFER)
+ {
+ StartLine = i * LINES_PER_BUFFER
+ };
+
+ for (int j = 0; j < LINES_PER_BUFFER; j++)
+ {
+ buffer.AddLine(new LogLine($"line {i * LINES_PER_BUFFER + j}".AsMemory(), i * LINES_PER_BUFFER + j), 0);
+ }
+
+ _index.Add(buffer);
+ }
+ }
+
+ // Validate setup
+ var snapshot = _index.CreateSnapshot();
+ if (snapshot.BufferCount != BufferCount)
+ {
+ throw new InvalidOperationException($"Setup failed: expected {BufferCount} buffers, got {snapshot.BufferCount}");
+ }
+ }
+
+ [GlobalCleanup]
+ public void Cleanup () => _index.Dispose();
+
+ ///
+ /// Simulates tail-follow: reading the last 1000 lines sequentially.
+ /// Should hit Layer 0 (thread-local cache) ~99% of the time.
+ ///
+ [Benchmark(Baseline = true)]
+ public LogBuffer? SequentialAccess ()
+ {
+ using var readlock = _index.AcquireReadLock();
+ LogBuffer? last = null;
+ var start = Math.Max(0, _totalLines - 1000);
+ for (int i = start; i < _totalLines; i++)
+ {
+ var logBufferEntry = _index.TryFindBuffer(i);
+ if (logBufferEntry.Found)
+ {
+ last = logBufferEntry.Buffer;
+ }
+ }
+
+ return last;
+ }
+
+ ///
+ /// Simulates search/goto: deterministic stride across the full file.
+ /// Co-prime stride visits buffers in non-sequential, non-repeating order.
+ /// Exercises Layers 2 and 3 heavily.
+ ///
+ [Benchmark]
+ public LogBuffer? StrideAccess ()
+ {
+ using var readLock = _index.AcquireReadLock();
+ LogBuffer? last = null;
+ var stride = _totalLines / 3 + 1;
+ var lineNum = 0;
+ for (int i = 0; i < 1000; i++)
+ {
+ var logBufferEntry = _index.TryFindBuffer(lineNum);
+ if (logBufferEntry.Found)
+ {
+ last = logBufferEntry.Buffer;
+ }
+
+ lineNum = (lineNum + stride) % _totalLines;
+ }
+
+ return last;
+ }
+
+ ///
+ /// Worst case for Layer 0: always crossing buffer boundaries.
+ /// Exercises Layer 1 (adjacent prediction).
+ ///
+ [Benchmark]
+ public LogBuffer? BoundaryAccess ()
+ {
+ using var readLock = _index.AcquireReadLock();
+ LogBuffer? last = null;
+
+ for (int i = 0; i < 1000; i++)
+ {
+ int lineNum = i * (_totalLines / 1000);
+ var logBufferEntry = _index.TryFindBuffer(lineNum);
+ if (logBufferEntry.Found)
+ {
+ last = logBufferEntry.Buffer;
+ }
+ }
+
+ return last;
+ }
+
+ ///
+ /// Simulates UI scrolling: page-sized jumps forward through the file.
+ /// 50-line pages with 3x page jumps (fast scroll drag).
+ /// Exercises Layer 0 within pages and Layers 1-2 on transitions.
+ ///
+ [Benchmark]
+ public LogBuffer? ScrollAccess ()
+ {
+ using var readLock = _index.AcquireReadLock();
+ LogBuffer? last = null;
+ const int pageSize = 50;
+ const int pageJump = pageSize * 3;
+ var pageStart = 0;
+
+ for (int page = 0; page < 20 && pageStart < _totalLines; page++)
+ {
+ var pageEnd = Math.Min(pageStart + pageSize, _totalLines);
+ for (int line = pageStart; line < pageEnd; line++)
+ {
+ var logBufferEntry = _index.TryFindBuffer(line);
+ if (logBufferEntry.Found)
+ {
+ last = logBufferEntry.Buffer;
+ }
+ }
+
+ pageStart += pageJump;
+ }
+
+ return last;
+ }
+
+ ///
+ /// Measures LRU eviction cost at current scale.
+ ///
+ [Benchmark]
+ public void EvictAndRepopulate ()
+ {
+ _index.EvictLeastRecentlyUsed();
+ }
+
+ public void Dispose ()
+ {
+ Dispose(true);
+ GC.SuppressFinalize(this);
+ }
+
+ protected virtual void Dispose (bool disposing)
+ {
+ if (!_disposed)
+ {
+ if (disposing)
+ {
+ _index?.Dispose();
+ }
+
+ _disposed = true;
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/LogExpert.Benchmarks/BufferIndexContentionBenchmarks.cs b/src/LogExpert.Benchmarks/BufferIndexContentionBenchmarks.cs
new file mode 100644
index 00000000..9885ddeb
--- /dev/null
+++ b/src/LogExpert.Benchmarks/BufferIndexContentionBenchmarks.cs
@@ -0,0 +1,170 @@
+using BenchmarkDotNet.Attributes;
+using BenchmarkDotNet.Diagnosers;
+
+using ColumnizerLib;
+
+using LogExpert.Benchmarks.Support;
+using LogExpert.Core.Classes.Log.Buffers;
+
+namespace LogExpert.Benchmarks;
+
+///
+/// Measures ReaderWriterLockSlim contention under concurrent read load.
+/// Compares single-threaded throughput against N concurrent readers
+/// to determine if RWLS is a bottleneck worth optimizing.
+///
+[MemoryDiagnoser]
+[ThreadingDiagnoser] // Reports lock contention + thread pool stats
+[RankColumn]
+public class BufferIndexContentionBenchmarks : IDisposable
+{
+ private BufferIndex _index = null!;
+ private int _totalLines;
+ private bool _disposed;
+
+ private const int BUFFERS = 10_000;
+ private const int LINES_PER_BUFFER = 500;
+ private const int READS_PER_TASK = 1_000;
+
+ [GlobalSetup]
+ public void Setup ()
+ {
+ _index = new BufferIndex(BUFFERS, LINES_PER_BUFFER);
+ _totalLines = BUFFERS * LINES_PER_BUFFER;
+
+ var fakeFileInfo = new FakeLogFileInfo();
+ using var writeLock = _index.AcquireWriteLock();
+ for (int i = 0; i < BUFFERS; i++)
+ {
+ var buffer = new LogBuffer(fakeFileInfo, LINES_PER_BUFFER)
+ {
+ StartLine = i * LINES_PER_BUFFER
+ };
+ for (int j = 0; j < LINES_PER_BUFFER; j++)
+ {
+ buffer.AddLine(
+ new LogLine($"line {i * LINES_PER_BUFFER + j}".AsMemory(),
+ i * LINES_PER_BUFFER + j), 0);
+ }
+ _index.Add(buffer);
+ }
+ }
+
+ /// <summary>
+ /// Single-threaded baseline: sequential reads under one read lock.
+ /// This is the ideal throughput ceiling.
+ /// </summary>
+ [Benchmark(Baseline = true)]
+ public int SingleThreadedReads ()
+ {
+ int found = 0;
+ using var readLock = _index.AcquireReadLock();
+ var start = Math.Max(0, _totalLines - READS_PER_TASK);
+ for (int i = start; i < _totalLines; i++)
+ {
+ if (_index.TryFindBuffer(i).Found)
+ {
+ found++;
+ }
+ }
+
+ return found;
+ }
+
+ /// <summary>
+ /// N concurrent readers each acquiring their own read lock.
+ /// If RWLS has no contention, throughput ≈ N × single-threaded.
+ /// </summary>
+ [Benchmark]
+ [Arguments(2)]
+ [Arguments(4)]
+ [Arguments(8)]
+ [Arguments(12)]
+ public int ConcurrentReads (int threadCount)
+ {
+ var total = 0;
+ _ = Parallel.For(0, threadCount, _ =>
+ {
+ int found = 0;
+ using var readLock = _index.AcquireReadLock();
+ var start = Math.Max(0, _totalLines - READS_PER_TASK);
+ for (int i = start; i < _totalLines; i++)
+ {
+ if (_index.TryFindBuffer(i).Found)
+ {
+ found++;
+ }
+ }
+ _ = Interlocked.Add(ref total, found);
+ });
+ return total;
+ }
+
+ /// <summary>
+ /// Simulates production: N readers + 1 writer (tail-follow append).
+ /// Writer acquires write lock briefly every ~1000 reads.
+ /// This is the realistic contention scenario.
+ /// </summary>
+ [Benchmark]
+ [Arguments(4)]
+ [Arguments(8)]
+ public int ConcurrentReadsWithWriter (int readerCount)
+ {
+ using var cts = new CancellationTokenSource();
+ var total = 0;
+
+ // Writer task: periodically takes write lock (simulates new buffer append)
+ var writerTask = Task.Run(() =>
+ {
+ while (!cts.Token.IsCancellationRequested)
+ {
+ using var writeLock = _index.AcquireWriteLock();
+ // Simulate brief write work (no actual mutation to keep state clean)
+ Thread.SpinWait(100);
+ }
+ });
+
+ // Reader tasks
+ _ = Parallel.For(0, readerCount, _ =>
+ {
+ int found = 0;
+ using var readLock = _index.AcquireReadLock();
+ var start = Math.Max(0, _totalLines - READS_PER_TASK);
+ for (int i = start; i < _totalLines; i++)
+ {
+ if (_index.TryFindBuffer(i).Found)
+ {
+ found++;
+ }
+ }
+
+ _ = Interlocked.Add(ref total, found);
+ });
+
+ cts.Cancel();
+ writerTask.Wait();
+ return total;
+ }
+
+ [GlobalCleanup]
+ public void Cleanup () => _index.Dispose();
+
+ public void Dispose ()
+ {
+ Dispose(true);
+ GC.SuppressFinalize(this);
+ }
+
+ protected virtual void Dispose (bool disposing)
+ {
+ if (!_disposed)
+ {
+ if (disposing)
+ {
+ _index?.Dispose();
+ }
+
+ _disposed = true;
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/LogExpert.Benchmarks/LogExpert.Benchmarks.csproj b/src/LogExpert.Benchmarks/LogExpert.Benchmarks.csproj
index 1240cbd4..4bfb4225 100644
--- a/src/LogExpert.Benchmarks/LogExpert.Benchmarks.csproj
+++ b/src/LogExpert.Benchmarks/LogExpert.Benchmarks.csproj
@@ -16,6 +16,7 @@
+
diff --git a/src/LogExpert.Benchmarks/Program.cs b/src/LogExpert.Benchmarks/Program.cs
new file mode 100644
index 00000000..01954f88
--- /dev/null
+++ b/src/LogExpert.Benchmarks/Program.cs
@@ -0,0 +1,54 @@
+using BenchmarkDotNet.Running;
+
+namespace LogExpert.Benchmarks;
+
+public static class Program
+{
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Globalization", "CA1303:Do not pass literals as localized parameters", Justification = "Benchmarks")]
+ public static void Main (string[] args)
+ {
+ if (args == null || args.Length == 0)
+ {
+ Console.WriteLine("No benchmarks specified. Running all benchmarks...");
+
+ // Run all benchmarks if no arguments are provided
+ _ = BenchmarkRunner.Run<StreamReaderBenchmarks>();
+ _ = BenchmarkRunner.Run<ReadThroughputBenchmarks>();
+ _ = BenchmarkRunner.Run<BufferIndexBenchmarks>();
+ _ = BenchmarkRunner.Run<BufferIndexContentionBenchmarks>();
+ }
+ else
+ {
+ // Run specific benchmarks based on command-line arguments
+ _ = BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args);
+ }
+
+ Console.WriteLine("Replace with the name of the benchmark you want to run, e.g. ");
+ Console.WriteLine("StreamReaderBenchmarks: Benchmarks for stream readers");
+ Console.WriteLine("ReadThroughputBenchmarks: Benchmarks for read throughput");
+ Console.WriteLine("BufferIndexBenchmarks: Benchmarks for buffer index");
+ Console.WriteLine("BufferIndexContentionBenchmarks: Benchmarks for buffer index contention");
+ Console.WriteLine("Dry run:");
+ Console.WriteLine("dotnet run -c Release -- --filter \"**\" --job Dry --noOverwrite");
+ Console.WriteLine("Short run:");
+ Console.WriteLine("dotnet run -c Release -- --filter \"**\" --job Short --noOverwrite");
+ Console.WriteLine("Full baseline run:");
+ Console.WriteLine("dotnet run -c Release -- --filter \"**\" --noOverwrite");
+ }
+}
+
+/*
+ * Comment / Uncomment the benchmark to run, careful some can run longer
+ * 1.) a dry run
+ * dotnet run -c Release -- --filter "StreamReaderBenchmarks" --job Dry --noOverwrite
+ * 2.) a short run
+ * dotnet run -c Release -- --filter "StreamReaderBenchmarks" --job Short --noOverwrite
+ * 3.) a full baseline run
+ * dotnet run -c Release -- --filter "StreamReaderBenchmarks" --noOverwrite
+ *
+ * The full baseline run generates a MD file
+ * BenchmarkDotNet.Artifacts/results/*-report-github.md
+ *
+ * If changes are made with the LogfileReader / BufferIndex, always do a Benchmark to
+ * verify no performance regression is introduced, especially with large files.
+ */
diff --git a/src/LogExpert.Benchmarks/ReadThroughputBenchmarks.cs b/src/LogExpert.Benchmarks/ReadThroughputBenchmarks.cs
new file mode 100644
index 00000000..688e2bd5
--- /dev/null
+++ b/src/LogExpert.Benchmarks/ReadThroughputBenchmarks.cs
@@ -0,0 +1,151 @@
+using System.Text;
+
+using BenchmarkDotNet.Attributes;
+
+using LogExpert.Core.Classes.Log;
+using LogExpert.Core.Entities;
+using LogExpert.Core.Enums;
+
+namespace LogExpert.Benchmarks;
+
+///
+/// Measures LogfileReader.ReadFiles() throughput with different progress reporters.
+/// Uses real temp files to include actual I/O in the measurement.
+///
+[MemoryDiagnoser]
+[RankColumn]
+public class ReadThroughputBenchmarks
+{
+ private string _tempFile = null!;
+
+ [Params(10_000, 100_000, 1_000_000)]
+ public int LineCount { get; set; }
+
+ [GlobalSetup]
+ public void Setup ()
+ {
+ _tempFile = Path.GetTempFileName();
+ GenerateLogFile(_tempFile, LineCount);
+
+ // Initialize PluginRegistry for local file system support
+ // (or use NullPluginRegistry if constructor doesn't need it)
+ _ = PluginRegistry.PluginRegistry.Create(Path.GetDirectoryName(_tempFile)!, 500);
+ }
+
+ /// <summary>
+ /// Baseline: read with NullProgressReporter (zero event overhead).
+ /// </summary>
+ [Benchmark(Baseline = true)]
+ public int ReadWithNullReporter ()
+ {
+ using var reader = new LogfileReader(
+ _tempFile,
+ new EncodingOptions { Encoding = Encoding.UTF8 },
+ multiFile: false,
+ bufferCount: 500,
+ linesPerBuffer: 500,
+ new MultiFileOptions(),
+ ReaderType.System,
+ PluginRegistry.PluginRegistry.Instance,
+ maximumLineLength: 500,
+ progressReporter: Core.Classes.Log.ProgressReporters.NullProgressReporter.Instance);
+
+ reader.ReadFiles();
+ return reader.LineCount;
+ }
+
+ /// <summary>
+ /// Production path: read with PeriodicProgressReporter (default, no subscribers).
+ /// </summary>
+ [Benchmark]
+ public int ReadWithPeriodicReporter ()
+ {
+ using var reader = new LogfileReader(
+ _tempFile,
+ new EncodingOptions { Encoding = Encoding.UTF8 },
+ multiFile: false,
+ bufferCount: 500,
+ linesPerBuffer: 500,
+ new MultiFileOptions(),
+ ReaderType.System,
+ PluginRegistry.PluginRegistry.Instance,
+ maximumLineLength: 500);
+ // No progressReporter = default PeriodicProgressReporter
+
+ reader.ReadFiles();
+ return reader.LineCount;
+ }
+
+ /// <summary>
+ /// Post-change: read with block-based allocation (System reader uses CharBlockAllocator).
+ /// Compare Gen0/Gen1/Gen2 collections vs baseline to validate allocation reduction.
+ /// This method is identical to ReadWithNullReporter — it exists solely for explicit
+ /// before/after naming in benchmark reports.
+ /// </summary>
+ [Benchmark]
+ public int ReadWithBlockAllocation ()
+ {
+ using var reader = new LogfileReader(
+ _tempFile,
+ new EncodingOptions { Encoding = Encoding.UTF8 },
+ multiFile: false,
+ bufferCount: 500,
+ linesPerBuffer: 500,
+ new MultiFileOptions(),
+ ReaderType.System,
+ PluginRegistry.PluginRegistry.Instance,
+ maximumLineLength: 500,
+ progressReporter: Core.Classes.Log.ProgressReporters.NullProgressReporter.Instance);
+
+ reader.ReadFiles();
+ return reader.LineCount;
+ }
+
+ /// <summary>
+ /// Direct-read: reads decoded chars directly into pooled blocks without per-line string allocation.
+ /// Compare allocations and GC counts vs ReadWithBlockAllocation.
+ /// </summary>
+ [Benchmark]
+ public int ReadWithDirectRead ()
+ {
+ using var reader = new LogfileReader(
+ _tempFile,
+ new EncodingOptions { Encoding = Encoding.UTF8 },
+ multiFile: false,
+ bufferCount: 500,
+ linesPerBuffer: 500,
+ new MultiFileOptions(),
+ ReaderType.SystemDirect,
+ PluginRegistry.PluginRegistry.Instance,
+ maximumLineLength: 500,
+ progressReporter: Core.Classes.Log.ProgressReporters.NullProgressReporter.Instance);
+
+ reader.ReadFiles();
+ return reader.LineCount;
+ }
+
+ [GlobalCleanup]
+ public void Cleanup ()
+ {
+ if (File.Exists(_tempFile))
+ {
+ File.Delete(_tempFile);
+ }
+ }
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Security", "CA5394:Do not use insecure randomness", Justification = "Benchmark data generation")]
+ private static void GenerateLogFile (string path, int lineCount)
+ {
+ var rng = new Random(42); // deterministic seed for reproducibility
+ using var writer = new StreamWriter(path, false, Encoding.UTF8, bufferSize: 65536);
+ for (int i = 0; i < lineCount; i++)
+ {
+ writer.Write("2026-04-23 12:00:00.");
+ writer.Write(i % 1000);
+ writer.Write(" [INFO] Thread-");
+ writer.Write(rng.Next(1, 32));
+ writer.Write(" SomeNamespace.SomeClass - Log message number ");
+ writer.WriteLine(i);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/LogExpert.Benchmarks/StreamReaderBenchmarks.cs b/src/LogExpert.Benchmarks/StreamReaderBenchmarks.cs
index c8a0b382..a82f0432 100644
--- a/src/LogExpert.Benchmarks/StreamReaderBenchmarks.cs
+++ b/src/LogExpert.Benchmarks/StreamReaderBenchmarks.cs
@@ -1,9 +1,8 @@
using System.Text;
using BenchmarkDotNet.Attributes;
-using BenchmarkDotNet.Running;
-using LogExpert.Core.Classes.Log;
+using LogExpert.Core.Classes.Log.Streamreaders;
using LogExpert.Core.Entities;
using LogExpert.Core.Interfaces;
@@ -150,12 +149,4 @@ private static void ReadAllLines (ILogStreamReader reader)
// Consume the line
}
}
-}
-
-public static class Program
-{
- public static void Main (string[] args)
- {
- _ = BenchmarkRunner.Run();
- }
-}
+}
\ No newline at end of file
diff --git a/src/LogExpert.Benchmarks/Support/FakeLogFileInfo.cs b/src/LogExpert.Benchmarks/Support/FakeLogFileInfo.cs
new file mode 100644
index 00000000..bd70a382
--- /dev/null
+++ b/src/LogExpert.Benchmarks/Support/FakeLogFileInfo.cs
@@ -0,0 +1,34 @@
+using ColumnizerLib;
+
+namespace LogExpert.Benchmarks.Support;
+
+///
+/// Minimal ILogFileInfo stub for benchmarks. No filesystem access.
+/// Wraps an in-memory byte array as the file content.
+///
+internal sealed class FakeLogFileInfo : ILogFileInfo
+{
+ private readonly byte[] _content;
+
+ public FakeLogFileInfo (string name = "fake.log", byte[]? content = null, long length = 1_000_000)
+ {
+ FullName = name;
+ _content = content ?? [];
+ Length = content?.Length ?? length;
+ OriginalLength = Length;
+ }
+
+ public string FullName { get; }
+ public string FileName => Path.GetFileName(FullName);
+ public string DirectoryName => Path.GetDirectoryName(FullName) ?? "";
+ public char DirectorySeparatorChar => Path.DirectorySeparatorChar;
+ public Uri Uri => new($"file:///{FullName}");
+ public long Length { get; set; }
+ public long OriginalLength { get; }
+ public bool FileExists => true;
+ public int PollInterval => 250;
+
+ public bool FileHasChanged () => false;
+ public Stream OpenStream () => new MemoryStream(_content, writable: false);
+ public ILogFileInfo GetRolloverInfo (string fileName) => new FakeLogFileInfo(fileName);
+}
\ No newline at end of file
diff --git a/src/LogExpert.Benchmarks/Support/NullPluginRegistry.cs b/src/LogExpert.Benchmarks/Support/NullPluginRegistry.cs
new file mode 100644
index 00000000..5c815054
--- /dev/null
+++ b/src/LogExpert.Benchmarks/Support/NullPluginRegistry.cs
@@ -0,0 +1,30 @@
+using ColumnizerLib;
+
+using LogExpert.Core.Interfaces;
+
+namespace LogExpert.Benchmarks.Support;
+
+///
+/// No-op IPluginRegistry for benchmarks. Returns empty columnizer list and
+/// a stub file system plugin that handles all URIs via local file system.
+///
+internal sealed class NullPluginRegistry : IPluginRegistry
+{
+ public static readonly NullPluginRegistry Instance = new();
+
+ public IList RegisteredColumnizers { get; } = [];
+
+ public IFileSystemPlugin FindFileSystemForUri (string fileNameOrUri) => NullFileSystemPlugin.Instance;
+
+ private sealed class NullFileSystemPlugin : IFileSystemPlugin
+ {
+ public static readonly NullFileSystemPlugin Instance = new();
+
+ public string Text => "Null";
+ public string Description => "No-op file system for benchmarks";
+ public bool CanHandleUri (string uriString) => true;
+
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Globalization", "CA1303:Do not pass literals as localized parameters", Justification = "For UnitTests")]
+ public ILogFileInfo GetLogfileInfo (string uriString) => throw new NotSupportedException("NullFileSystemPlugin does not support GetLogfileInfo");
+ }
+}
\ No newline at end of file
diff --git a/src/LogExpert.Core/Classes/Columnizer/ClfColumnizer.cs b/src/LogExpert.Core/Classes/Columnizer/ClfColumnizer.cs
index 3d299940..6cfcdda4 100644
--- a/src/LogExpert.Core/Classes/Columnizer/ClfColumnizer.cs
+++ b/src/LogExpert.Core/Classes/Columnizer/ClfColumnizer.cs
@@ -67,13 +67,19 @@ public string[] GetColumnNames ()
///
/// Extracts the timestamp from the specified log line using the provided callback.
///
- /// If the log line does not contain a valid timestamp in the expected column or format, the
- /// method returns DateTime.MinValue. The expected timestamp format and column position are determined by the
- /// implementation and may vary depending on the log source.
- /// A callback interface used to assist in parsing the log line and retrieving column information.
+ ///
+ /// If the log line does not contain a valid timestamp in the expected column or format, the method returns
+ /// DateTime.MinValue. The expected timestamp format and column position are determined by the implementation and
+ /// may vary depending on the log source.
+ ///
+ ///
+ /// A callback interface used to assist in parsing the log line and retrieving column information.
+ ///
/// The log line from which to extract the timestamp.
- /// A DateTime value representing the timestamp extracted from the log line. Returns DateTime.MinValue if the
- /// timestamp cannot be parsed or is not present.
+ ///
+ /// A DateTime value representing the timestamp extracted from the log line. Returns DateTime.MinValue if the
+ /// timestamp cannot be parsed or is not present.
+ ///
public DateTime GetTimestamp (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
// Use SplitLine to parse, then extract timestamp column
@@ -104,14 +110,20 @@ FormatException or
///
/// Splits a log line into its constituent columns using the configured columnizer logic.
///
- /// If the input line does not match the expected format, the entire line is placed in the
- /// request column. For lines longer than 1024 characters, only the first 1024 characters are used for
- /// columnization. The method does not localize column values.
- /// A callback interface used to provide additional context or services required during columnization. Cannot be
- /// null.
+ ///
+ /// If the input line does not match the expected format, the entire line is placed in the request column. For lines
+ /// longer than 1024 characters, only the first 1024 characters are used for columnization. The method does not
+ /// localize column values.
+ ///
+ ///
+ /// A callback interface used to provide additional context or services required during columnization. Cannot be
+ /// null.
+ ///
/// The log line to be split into columns. Cannot be null.
- /// An object representing the columnized log line, with each column populated according to the parsed content of
- /// the input line.
+ ///
+ /// An object representing the columnized log line, with each column populated according to the parsed content of
+ /// the input line.
+ ///
[System.Diagnostics.CodeAnalysis.SuppressMessage("Globalization", "CA1303:Do not pass literals as localized parameters", Justification = "Intentionally Passed")]
public IColumnizedLogLineMemory SplitLine (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
@@ -237,20 +249,27 @@ public string GetCustomName ()
///
/// Processes a value change for a specified column and notifies the callback of the update.
///
- /// If the column index is 2, the method attempts to interpret the values as date and time
- /// strings and calculates the time offset in milliseconds. No action is taken for other column indices.
+ ///
+ /// If the column index is 2, the method attempts to interpret the values as date and time strings and calculates
+ /// the time offset in milliseconds. No action is taken for other column indices.
+ ///
/// The callback interface used to handle column value updates.
/// The zero-based index of the column for which the value is being updated.
/// The new value to be set for the specified column.
/// The previous value of the specified column before the update.
public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, string oldValue)
+ {
+ PushValue(callback, column, value, oldValue.AsMemory());
+ }
+
+ public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory oldValue)
{
if (column == 2)
{
try
{
var newDateTime = DateTime.ParseExact(value, DATE_TIME_FORMAT, _cultureInfo);
- var oldDateTime = DateTime.ParseExact(oldValue, DATE_TIME_FORMAT, _cultureInfo);
+ var oldDateTime = DateTime.ParseExact(oldValue.ToString(), DATE_TIME_FORMAT, _cultureInfo);
var mSecsOld = oldDateTime.Ticks / TimeSpan.TicksPerMillisecond;
var mSecsNew = newDateTime.Ticks / TimeSpan.TicksPerMillisecond;
_timeOffset = (int)(mSecsNew - mSecsOld);
@@ -264,10 +283,11 @@ public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, st
///
/// Provides a compiled regular expression used to parse lines matching a specific log entry format.
///
- /// The regular expression is precompiled for performance and is intended to extract fields from
- /// log lines with a fixed format. The pattern captures multiple groups, including text fields and quoted values.
- /// Use the returned to match and extract data from log entries conforming to this
- /// structure.
+ ///
+ /// The regular expression is precompiled for performance and is intended to extract fields from log lines with a
+ /// fixed format. The pattern captures multiple groups, including text fields and quoted values. Use the returned
+ /// to match and extract data from log entries conforming to this structure.
+ ///
/// A instance that matches lines with the expected log entry structure.
[GeneratedRegex("(.*) (-) (.*) (\\[.*\\]) (\".*\") (.*) (.*) (\".*\") (\".*\")")]
private static partial Regex LineRegex ();
diff --git a/src/LogExpert.Core/Classes/Columnizer/SquareBracketColumnizer.cs b/src/LogExpert.Core/Classes/Columnizer/SquareBracketColumnizer.cs
index dce092c9..5ac9f6da 100644
--- a/src/LogExpert.Core/Classes/Columnizer/SquareBracketColumnizer.cs
+++ b/src/LogExpert.Core/Classes/Columnizer/SquareBracketColumnizer.cs
@@ -6,11 +6,13 @@ namespace LogExpert.Core.Classes.Columnizer;
/// Provides functionality to split log lines into columns based on square bracket delimiters, typically extracting
/// date, time, and message fields for log analysis.
///
-/// This columnizer is designed for log formats where fields are enclosed in square brackets or separated
-/// by whitespace, with optional date and time columns at the beginning of each line. It supports dynamic detection of
-/// column structure based on sample log lines and can apply a time offset to parsed timestamps. The class implements
-/// interfaces for memory-efficient log line processing and columnizer prioritization, making it suitable for
-/// integration with log viewers or analysis tools that require flexible column extraction.
+///
+/// This columnizer is designed for log formats where fields are enclosed in square brackets or separated by whitespace,
+/// with optional date and time columns at the beginning of each line. It supports dynamic detection of column structure
+/// based on sample log lines and can apply a time offset to parsed timestamps. The class implements interfaces for
+/// memory-efficient log line processing and columnizer prioritization, making it suitable for integration with log
+/// viewers or analysis tools that require flexible column extraction.
+///
public class SquareBracketColumnizer : ILogLineMemoryColumnizer, IColumnizerPriorityMemory
{
#region ILogLineMemoryColumnizer implementation
@@ -51,7 +53,9 @@ public bool IsTimeshiftImplemented ()
///
/// Sets the time offset, in milliseconds, to be applied to time calculations.
///
- /// The time offset, in milliseconds, to apply. Positive values advance the time; negative values delay it.
+ ///
+ /// The time offset, in milliseconds, to apply. Positive values advance the time; negative values delay it.
+ ///
public void SetTimeOffset (int msecOffset)
{
_timeOffset = msecOffset;
@@ -60,8 +64,10 @@ public void SetTimeOffset (int msecOffset)
///
/// Gets the current time offset, in seconds, applied to time calculations.
///
- /// The time offset, in seconds. A positive value indicates a forward offset; a negative value indicates a backward
- /// offset.
+ ///
+ /// The time offset, in seconds. A positive value indicates a forward offset; a negative value indicates a backward
+ /// offset.
+ ///
public int GetTimeOffset ()
{
return _timeOffset;
@@ -70,13 +76,16 @@ public int GetTimeOffset ()
///
/// Extracts and parses the timestamp from the specified log line.
///
- /// If the log line does not contain a valid timestamp or if parsing fails, the method returns
- /// DateTime.MinValue. The expected timestamp is typically composed of the first two columns in the log
- /// line.
+ ///
+ /// If the log line does not contain a valid timestamp or if parsing fails, the method returns DateTime.MinValue.
+ /// The expected timestamp is typically composed of the first two columns in the log line.
+ ///
/// A callback interface used to assist with columnizing the log line.
/// The log line from which to extract the timestamp.
- /// A DateTime value representing the parsed timestamp if extraction and parsing succeed; otherwise,
- /// DateTime.MinValue.
+ ///
+ /// A DateTime value representing the parsed timestamp if extraction and parsing succeed; otherwise,
+ /// DateTime.MinValue.
+ ///
public DateTime GetTimestamp (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
var cols = SplitLine(callback, logLine);
@@ -119,7 +128,9 @@ public string GetName ()
///
/// Gets a description of the log line splitting format, including the expected fields.
///
- /// A string describing how each log line is split into fields: Date, Time, and the remainder of the log message.
+ ///
+ /// A string describing how each log line is split into fields: Date, Time, and the remainder of the log message.
+ ///
public string GetDescription ()
{
return "Splits every line into n fields: Date, Time and the rest of the log message";
@@ -137,13 +148,17 @@ public int GetColumnCount ()
///
/// Returns an array of column names based on the current log format configuration.
///
- /// The set and order of column names depend on the log format and configuration. If time
- /// information is present, the array includes "Date" and "Time" columns. Additional columns such as "Level" and
- /// "Source" are included if the log contains more than three or four columns, respectively. Any extra columns are
- /// named sequentially as "Source1", "Source2", etc., before the final "Message" column.
- /// An array of strings containing the names of all columns in the log. The array includes standard columns such as
+ ///
+ /// The set and order of column names depend on the log format and configuration. If time information is present,
+ /// the array includes "Date" and "Time" columns. Additional columns such as "Level" and "Source" are included if
+ /// the log contains more than three or four columns, respectively. Any extra columns are named sequentially as
+ /// "Source1", "Source2", etc., before the final "Message" column.
+ ///
+ ///
+ /// An array of strings containing the names of all columns in the log. The array includes standard columns such as
/// "Date", "Time", "Level", "Source", and "Message", as well as additional source columns if present. The array
- /// will contain one element for each column in the log, in the order they appear.
+ /// will contain one element for each column in the log, in the order they appear.
+ ///
public string[] GetColumnNames ()
{
var columnNames = new List(GetColumnCount());
@@ -178,14 +193,19 @@ public string[] GetColumnNames ()
///
/// Splits the specified log line into its constituent columns based on detected date and time formats.
///
- /// If the log line does not match a recognized date and time format, the entire line is treated
- /// as a single column. If the log line is too short to contain date or time information, it is returned as a single
- /// column as well.
- /// A callback interface that can be used during the columnization process. This parameter may be used to provide
- /// additional context or services required for columnization.
+ ///
+ /// If the log line does not match a recognized date and time format, the entire line is treated as a single column.
+ /// If the log line is too short to contain date or time information, it is returned as a single column as well.
+ ///
+ ///
+ /// A callback interface that can be used during the columnization process. This parameter may be used to provide
+ /// additional context or services required for columnization.
+ ///
/// The log line to be split into columns. Cannot be null.
- /// An object representing the columnized version of the input log line. The returned object contains the extracted
- /// columns, which may include date, time, and the remainder of the line, depending on the detected format.
+ ///
+ /// An object representing the columnized version of the input log line. The returned object contains the extracted
+ /// columns, which may include date, time, and the remainder of the line, depending on the detected format.
+ ///
[System.Diagnostics.CodeAnalysis.SuppressMessage("Globalization", "CA1303:Do not pass literals as localized parameters", Justification = "Intentionally passed")]
public IColumnizedLogLineMemory SplitLine (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
@@ -258,16 +278,22 @@ FormatException or
///
/// Splits a log line into an array of columns based on date, time, and bracketed field positions.
///
- /// If the input line does not contain enough fields to match the expected column count, empty
- /// columns are inserted to ensure the returned array has the correct length. The method associates each column with
- /// the provided parent log line object.
+ ///
+ /// If the input line does not contain enough fields to match the expected column count, empty columns are inserted
+ /// to ensure the returned array has the correct length. The method associates each column with the provided parent
+ /// log line object.
+ ///
/// The log line to split, provided as a read-only memory buffer of characters.
/// The length, in characters, of the date field at the start of the line.
/// The length, in characters, of the time field following the date field.
- /// The zero-based position in the line immediately after the date and time fields.
+ ///
+ /// The zero-based position in the line immediately after the date and time fields.
+ ///
/// The parent log line object to associate with each resulting column.
- /// An array of columns parsed from the input line. The array contains one element for each expected column, with
- /// empty columns inserted if the input does not provide enough fields.
+ ///
+ /// An array of columns parsed from the input line. The array contains one element for each expected column, with
+ /// empty columns inserted if the input does not provide enough fields.
+ ///
private Column[] SquareSplit (ReadOnlyMemory line, int dateLen, int timeLen, int dateTimeEndPos, ColumnizedLogLine clogLine)
{
List columnList = [];
@@ -331,12 +357,16 @@ private Column[] SquareSplit (ReadOnlyMemory line, int dateLen, int timeLe
/// Determines the priority level for parsing log lines based on the specified file name and a collection of log
/// line samples.
///
- /// The returned priority reflects how well the log format is supported based on the structure
- /// and content of the provided samples. This method does not modify the input collection.
+ ///
+ /// The returned priority reflects how well the log format is supported based on the structure and content of the
+ /// provided samples. This method does not modify the input collection.
+ ///
/// The name of the log file to analyze. Cannot be null.
/// A collection of log line samples to evaluate for format support. Cannot be null.
- /// A value indicating the priority level for parsing the provided log lines. Returns a higher priority if the
- /// format is well supported or perfectly supported; otherwise, returns a lower priority.
+ ///
+ /// A value indicating the priority level for parsing the provided log lines. Returns a higher priority if the
+ /// format is well supported or perfectly supported; otherwise, returns a lower priority.
+ ///
public Priority GetPriority (string fileName, IEnumerable samples)
{
ArgumentNullException.ThrowIfNull(fileName, nameof(fileName));
@@ -430,8 +460,12 @@ public Priority GetPriority (string fileName, IEnumerable sample
///
/// Determines the priority for processing the specified log file based on the provided log line samples.
///
- /// The name of the log file for which to determine the processing priority. Cannot be null or empty.
- /// A collection of log line samples used to assess the file's priority. Cannot be null.
+ ///
+ /// The name of the log file for which to determine the processing priority. Cannot be null or empty.
+ ///
+ ///
+ /// A collection of log line samples used to assess the file's priority. Cannot be null.
+ ///
/// A value indicating the determined priority for the specified log file.
public Priority GetPriority (string fileName, IEnumerable samples)
{
@@ -442,28 +476,39 @@ public Priority GetPriority (string fileName, IEnumerable samples)
/// Processes a value change for a specified column and updates the time offset if the column represents a
/// timestamp.
///
- /// This method only updates the time offset when the specified column index is 1 and both the
- /// new and old values can be parsed as valid timestamps according to the determined time format. No action is taken
- /// for other columns or if the values cannot be parsed as dates.
- /// The callback interface used to interact with the columnizer during value processing.
- /// The zero-based index of the column for which the value is being processed. If the value is 1, the method
- /// attempts to update the time offset.
+ ///
+ /// This method only updates the time offset when the specified column index is 1 and both the new and old values
+ /// can be parsed as valid timestamps according to the determined time format. No action is taken for other columns
+ /// or if the values cannot be parsed as dates.
+ ///
+ ///
+ /// The callback interface used to interact with the columnizer during value processing.
+ ///
+ ///
+ /// The zero-based index of the column for which the value is being processed. If the value is 1, the method
+ /// attempts to update the time offset.
+ ///
/// The new value to be processed for the specified column.
/// The previous value of the specified column before the change.
public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, string oldValue)
+ {
+ PushValue(callback, column, value, oldValue.AsMemory());
+ }
+
+ public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue)
{
if (column == 1)
{
try
{
- var formatInfo = _timeFormatDeterminer.DetermineTimeFormatInfo(oldValue.AsSpan());
+ var formatInfo = _timeFormatDeterminer.DetermineTimeFormatInfo(oldValue.Span);
if (formatInfo == null)
{
return;
}
var newDateTime = DateTime.ParseExact(value, formatInfo.TimeFormat, formatInfo.CultureInfo);
- var oldDateTime = DateTime.ParseExact(oldValue, formatInfo.TimeFormat, formatInfo.CultureInfo);
+ var oldDateTime = DateTime.ParseExact(oldValue.Span, formatInfo.TimeFormat, formatInfo.CultureInfo);
var mSecsOld = oldDateTime.Ticks / TimeSpan.TicksPerMillisecond;
var mSecsNew = newDateTime.Ticks / TimeSpan.TicksPerMillisecond;
_timeOffset = (int)(mSecsNew - mSecsOld);
diff --git a/src/LogExpert.Core/Classes/Columnizer/TimestampColumnizer.cs b/src/LogExpert.Core/Classes/Columnizer/TimestampColumnizer.cs
index f1d2a2d7..3a952aa3 100644
--- a/src/LogExpert.Core/Classes/Columnizer/TimestampColumnizer.cs
+++ b/src/LogExpert.Core/Classes/Columnizer/TimestampColumnizer.cs
@@ -49,13 +49,18 @@ public string[] GetColumnNames ()
///
/// Splits a log line into its constituent columns, typically separating date, time, and the remainder of the line.
///
- /// If the log line does not match a recognized date/time format, the entire line is returned as
- /// a single column. Columns typically represent the date, time, and the rest of the log entry. If parsing fails due
- /// to format issues, column values are set to "n/a" except for the remainder, which contains the original
- /// line.
- /// A callback interface used to provide additional context or services required during columnization.
+ ///
+ /// If the log line does not match a recognized date/time format, the entire line is returned as a single column.
+ /// Columns typically represent the date, time, and the rest of the log entry. If parsing fails due to format
+ /// issues, column values are set to "n/a" except for the remainder, which contains the original line.
+ ///
+ ///
+ /// A callback interface used to provide additional context or services required during columnization.
+ ///
/// The log line to be split into columns. Cannot be null.
- /// An object representing the columnized log line, with each column containing a segment of the original log line.
+ ///
+ /// An object representing the columnized log line, with each column containing a segment of the original log line.
+ ///
[System.Diagnostics.CodeAnalysis.SuppressMessage("Globalization", "CA1303:Do not pass literals as localized parameters", Justification = "Intentionally passed")]
public IColumnizedLogLineMemory SplitLine (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
@@ -144,13 +149,16 @@ FormatException or
///
/// Extracts and parses the timestamp from the specified log line using the provided callback.
///
- /// If the log line does not contain a valid timestamp in the expected columns or if parsing
- /// fails, the method returns DateTime.MinValue. The timestamp is expected to be composed from the first two columns
- /// of the log line.
+ ///
+ /// If the log line does not contain a valid timestamp in the expected columns or if parsing fails, the method
+ /// returns DateTime.MinValue. The timestamp is expected to be composed from the first two columns of the log line.
+ ///
/// The callback used to access column information for the log line.
/// The log line from which to extract the timestamp.
- /// A DateTime value representing the parsed timestamp if extraction and parsing succeed; otherwise,
- /// DateTime.MinValue.
+ ///
+ /// A DateTime value representing the parsed timestamp if extraction and parsing succeed; otherwise,
+ /// DateTime.MinValue.
+ ///
public DateTime GetTimestamp (ILogLineMemoryColumnizerCallback callback, ILogLineMemory logLine)
{
var cols = SplitLine(callback, logLine);
@@ -192,19 +200,24 @@ FormatException or
}
public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, string oldValue)
+ {
+ PushValue(callback, column, value, oldValue.AsMemory());
+ }
+
+ public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, string value, ReadOnlyMemory<char> oldValue)
{
if (column == 1)
{
try
{
- var formatInfo = _timeFormatDeterminer.DetermineTimeFormatInfo(oldValue.AsSpan());
+ var formatInfo = _timeFormatDeterminer.DetermineTimeFormatInfo(oldValue.Span);
if (formatInfo == null)
{
return;
}
var newDateTime = DateTime.ParseExact(value, formatInfo.TimeFormat, formatInfo.CultureInfo);
- var oldDateTime = DateTime.ParseExact(oldValue, formatInfo.TimeFormat, formatInfo.CultureInfo);
+ var oldDateTime = DateTime.ParseExact(oldValue.Span, formatInfo.TimeFormat, formatInfo.CultureInfo);
var mSecsOld = oldDateTime.Ticks / TimeSpan.TicksPerMillisecond;
var mSecsNew = newDateTime.Ticks / TimeSpan.TicksPerMillisecond;
_timeOffset = (int)(mSecsNew - mSecsOld);
@@ -221,8 +234,10 @@ public void PushValue (ILogLineMemoryColumnizerCallback callback, int column, st
///
/// The name of the log file to evaluate. Cannot be null.
/// A collection of log lines to analyze for timestamp patterns. Cannot be null.
- /// A value indicating the priority for processing the specified log file. Returns Priority.WellSupport if the
- /// majority of log lines contain recognizable timestamps; otherwise, returns Priority.NotSupport.
+ ///
+ /// A value indicating the priority for processing the specified log file. Returns Priority.WellSupport if the
+ /// majority of log lines contain recognizable timestamps; otherwise, returns Priority.NotSupport.
+ ///
public Priority GetPriority (string fileName, IEnumerable samples)
{
ArgumentNullException.ThrowIfNull(samples, nameof(samples));
diff --git a/src/LogExpert.Core/Classes/Log/BatchedProgressReporter.cs b/src/LogExpert.Core/Classes/Log/BatchedProgressReporter.cs
deleted file mode 100644
index 03d711bb..00000000
--- a/src/LogExpert.Core/Classes/Log/BatchedProgressReporter.cs
+++ /dev/null
@@ -1,106 +0,0 @@
-using System.Collections.Concurrent;
-
-using LogExpert.Core.EventArguments;
-
-namespace LogExpert.Core.Classes.Log;
-
-///
-/// Batches progress updates to reduce UI thread marshalling overhead.
-/// Collects updates in a thread-safe queue and processes them on a timer.
-///
-//TODO Refactor
-public sealed class BatchedProgressReporter : IDisposable
-{
- private readonly ConcurrentQueue<LoadFileEventArgs> _progressQueue = new();
- private readonly Timer _timer;
- private readonly Action<LoadFileEventArgs> _progressCallback;
- private readonly int _updateIntervalMs;
- private bool _disposed;
-
- ///
- /// Creates a new batched progress reporter.
- ///
- /// Callback to invoke with latest progress
- /// Update interval in milliseconds (default: 100ms)
- public BatchedProgressReporter (Action<LoadFileEventArgs> progressCallback, int updateIntervalMs = 100)
- {
- _progressCallback = progressCallback ?? throw new ArgumentNullException(nameof(progressCallback));
- _updateIntervalMs = updateIntervalMs;
-
- // Start timer
- _timer = new Timer(ProcessQueue, null, updateIntervalMs, updateIntervalMs);
- }
-
- ///
- /// Reports progress (thread-safe, non-blocking)
- ///
- public void ReportProgress (LoadFileEventArgs args)
- {
- if (_disposed)
- {
- return;
- }
-
- // Only keep the latest update - discard old ones
- _progressQueue.Enqueue(args);
-
- // Keep queue size bounded (max 10 items)
- while (_progressQueue.Count > 10)
- {
- _ = _progressQueue.TryDequeue(out _);
- }
- }
-
- ///
- /// Flushes any pending updates immediately
- ///
- public void Flush ()
- {
- ProcessQueue(null);
- }
-
- private void ProcessQueue (object state)
- {
- if (_disposed)
- {
- return;
- }
-
- // Get only the LATEST update (discard intermediate ones)
- LoadFileEventArgs latestUpdate = null;
- while (_progressQueue.TryDequeue(out var update))
- {
- latestUpdate = update;
- }
-
- // Invoke callback with latest update
- if (latestUpdate != null)
- {
- try
- {
- _progressCallback(latestUpdate);
- }
- catch (Exception ex)
- {
- // Log but don't crash
- System.Diagnostics.Debug.WriteLine($"Error in progress callback: {ex.Message}");
- }
- }
- }
-
- public void Dispose ()
- {
- if (_disposed)
- {
- return;
- }
-
- _disposed = true;
-
- Flush();
- _timer?.Dispose();
-
- // Clear queue
- _progressQueue.Clear();
- }
-}
\ No newline at end of file
diff --git a/src/LogExpert.Core/Classes/Log/Buffers/BufferIndex.cs b/src/LogExpert.Core/Classes/Log/Buffers/BufferIndex.cs
new file mode 100644
index 00000000..d70cbb7d
--- /dev/null
+++ b/src/LogExpert.Core/Classes/Log/Buffers/BufferIndex.cs
@@ -0,0 +1,656 @@
+using System.Collections.Concurrent;
+using System.Diagnostics;
+using System.Globalization;
+
+using NLog;
+
+namespace LogExpert.Core.Classes.Log.Buffers;
+
+/*
+ * !IMPORTANT
+ * Before and after changes are made run the BufferIndexBenchmarks for a baseline, so no performance regression is introduced
+ * If changes are made to this class, please also review BufferIndexSnapshot and BufferShiftTest to ensure consistency and correctness.
+ */
+
+/// <summary>
+/// Thread-safe index that maps line numbers to <see cref="LogBuffer"/> instances with LRU eviction. This is the hot
+/// path — every GetLogLine call goes through here. Has zero file-I/O dependencies. Constructable with only integers for
+/// benchmarking.
+/// </summary>
+public sealed class BufferIndex : IDisposable
+{
+ private readonly int _maxBuffers;
+ private readonly int _maxLinesPerBuffer;
+ private readonly ReaderWriterLockSlim _lock = new(LockRecursionPolicy.SupportsRecursion);
+ private readonly SortedList<int, LogBuffer> _bufferList = [];
+ private readonly ConcurrentDictionary<int, LogBufferCacheEntry> _lruCacheDict;
+ private readonly ThreadLocal<int> _lastBufferIndex = new(() => -1);
+
+ private static readonly Logger _logger = LogManager.GetCurrentClassLogger();
+
+ private volatile bool _isLineCountDirty = true;
+ private int _cachedLineCount;
+
+ public BufferIndex (int maxBuffers, int maxLinesPerBuffer)
+ {
+ _maxBuffers = maxBuffers;
+ _maxLinesPerBuffer = maxLinesPerBuffer;
+ _lruCacheDict = new(Environment.ProcessorCount, maxBuffers + 1);
+ }
+
+ #region Hot Path Lookup
+
+ ///
+ /// 4-layer lookup. Caller must hold at least a read lock. Returns false if lineNum is out of range or the index is
+ /// empty.
+ ///
+ public LogBufferEntry TryFindBuffer (int lineNum)
+ {
+ return TryFindBufferWithIndex(lineNum);
+ }
+
+ ///
+ /// Core buffer lookup returning both buffer and index position. The caller MUST already hold a read,
+ /// upgradeable-read, or write lock.
+ ///
+ internal LogBufferEntry GetBufferForLineWithIndex (int lineNum)
+ {
+ return TryFindBufferWithIndex(lineNum);
+ }
+
+ private LogBufferEntry TryFindBufferWithIndex (int lineNum)
+ {
+#if DEBUG
+ Util.AssertTrue(_lock.IsReadLockHeld || _lock.IsUpgradeableReadLockHeld || _lock.IsWriteLockHeld, "No lock held for buffer list in TryFindBufferWithIndex");
+ long startTime = Environment.TickCount;
+#endif
+ var arr = _bufferList.Values;
+ var count = arr.Count;
+
+ if (count == 0)
+ {
+ return new LogBufferEntry(null, -1, false);
+ }
+
+ // Layer 0: Last buffer cache — O(1) for sequential access
+ var lastIdx = _lastBufferIndex.Value;
+ if (lastIdx >= 0 && lastIdx < count)
+ {
+ var buf = arr[lastIdx];
+ if ((uint)(lineNum - buf.StartLine) < (uint)buf.LineCount)
+ {
+ //dont UpdateLRUCache, the cache has not changed in layer 0
+ return new LogBufferEntry(buf, lastIdx, true);
+ }
+
+ // Layer 1: Adjacent buffer prediction — O(1) for buffer boundary crossings
+ if (lastIdx + 1 < count)
+ {
+ var next = arr[lastIdx + 1];
+ if ((uint)(lineNum - next.StartLine) < (uint)next.LineCount)
+ {
+ _lastBufferIndex.Value = lastIdx + 1;
+ UpdateLru(next);
+ return new LogBufferEntry(next, lastIdx + 1, true);
+ }
+ }
+
+ if (lastIdx - 1 >= 0)
+ {
+ var prev = arr[lastIdx - 1];
+ if ((uint)(lineNum - prev.StartLine) < (uint)prev.LineCount)
+ {
+ _lastBufferIndex.Value = lastIdx - 1;
+ UpdateLru(prev);
+ return new LogBufferEntry(prev, lastIdx - 1, true);
+ }
+ }
+ }
+
+ // Layer 2: Direct mapping guess — O(1) speculative for uniform buffers
+ var guess = lineNum / _maxLinesPerBuffer;
+ if ((uint)guess < (uint)count)
+ {
+ var buf = arr[guess];
+ if ((uint)(lineNum - buf.StartLine) < (uint)buf.LineCount)
+ {
+ _lastBufferIndex.Value = guess;
+ UpdateLru(buf);
+ return new LogBufferEntry(buf, guess, true);
+ }
+ }
+
+ // Layer 3: Branchless binary search with power-of-two strides
+ var step = HighestPowerOfTwo(count);
+ var idx = (arr[step - 1].StartLine <= lineNum) ? count - step : 0;
+
+ for (step >>= 1; step > 0; step >>= 1)
+ {
+ var probe = idx + step;
+ if (probe < count && arr[probe].StartLine <= lineNum)
+ {
+ idx = probe;
+ }
+ }
+
+ // idx is now the buffer index — verify bounds
+ if (idx < count)
+ {
+ var buf = arr[idx];
+ if ((uint)(lineNum - buf.StartLine) < (uint)buf.LineCount)
+ {
+ _lastBufferIndex.Value = idx;
+ UpdateLru(buf);
+ return new LogBufferEntry(buf, idx, true);
+ }
+ }
+#if DEBUG
+ long endTime = Environment.TickCount;
+ _logger.Debug($"TryFindBufferWithIndex({lineNum}) duration: {endTime - startTime} ms.");
+#endif
+ return new LogBufferEntry(null, -1, false);
+ }
+
+ #endregion
+
+ #region Navigation: multi-file traversal
+
+ ///
+ /// Finds the start line of the next file segment after . Caller must hold at least a read
+ /// lock.
+ ///
+ public (bool Found, int StartLine) TryGetNextFileStartLine (int lineNum)
+ {
+ var result = -1;
+
+ var foundBufferEntry = TryFindBufferWithIndex(lineNum);
+ if (!foundBufferEntry.Found)
+ {
+ return (foundBufferEntry.Found, result);
+ }
+
+ for (var i = foundBufferEntry.Index; i < _bufferList.Values.Count; ++i)
+ {
+ if (_bufferList.Values[i].FileInfo != foundBufferEntry.Buffer.FileInfo)
+ {
+ result = _bufferList.Values[i].StartLine;
+ break;
+ }
+ }
+
+ return (result != -1, result);
+ }
+
+ ///
+ /// Finds the start line of the previous file segment before . Caller must hold at least a
+ /// read lock.
+ ///
+ public (bool Found, int StartLine) TryGetPrevFileStartLine (int lineNum)
+ {
+ var result = -1;
+
+ var foundBufferEntry = TryFindBufferWithIndex(lineNum);
+
+ if (!foundBufferEntry.Found)
+ {
+ return (foundBufferEntry.Found, result);
+ }
+
+ if (foundBufferEntry.Buffer != null && foundBufferEntry.Index != -1)
+ {
+ for (var i = foundBufferEntry.Index; i >= 0; --i)
+ {
+ if (_bufferList.Values[i].FileInfo != foundBufferEntry.Buffer.FileInfo)
+ {
+ result = _bufferList.Values[i].StartLine + _bufferList.Values[i].LineCount;
+ break;
+ }
+ }
+ }
+
+ return (result != -1, result);
+ }
+
+ ///
+ /// Finds the first buffer belonging to the same file as . Caller must hold at least a
+ /// read lock.
+ ///
+ public LogBuffer? GetFirstBufferForFile (LogBuffer logBuffer, int index)
+ {
+ //maybe not necessary
+ ArgumentNullException.ThrowIfNull(logBuffer, "GetFirstBufferForFile not possible: Buffer is NULL");
+
+ if (index == -1)
+ {
+ return null;
+ }
+
+ var info = logBuffer.FileInfo;
+
+ var resultBuffer = logBuffer;
+ while (true)
+ {
+ index--;
+ if (index < 0 || _bufferList.Values[index].FileInfo != info)
+ {
+ break;
+ }
+
+ resultBuffer = _bufferList.Values[index];
+ }
+
+ return resultBuffer;
+ }
+
+ #endregion
+
+ #region Mutation — called during reads and rollover
+
+ ///
+ /// Adds a buffer to the index and updates LRU tracking. Caller must hold a write lock.
+ ///
+ public void Add (LogBuffer buffer)
+ {
+#if DEBUG
+ _logger.Debug(CultureInfo.InvariantCulture, "AddBufferToList(): {0}/{1}/{2}", buffer.StartLine, buffer.LineCount, buffer.FileInfo.FullName);
+#endif
+ _bufferList[buffer.StartLine] = buffer;
+ UpdateLru(buffer);
+ _isLineCountDirty = true;
+ }
+
+ ///
+ /// Removes a buffer by its start line key and LRU entry. Caller must hold a write lock.
+ ///
+ public bool Remove (LogBuffer buffer)
+ {
+ ArgumentNullException.ThrowIfNull(buffer, "Remove not possible: Buffer is NULL");
+
+ Debug.Assert(_lock.IsWriteLockHeld, "No writer lock for buffer list");
+ _ = _lruCacheDict.TryRemove(buffer.StartLine, out _);
+ _isLineCountDirty = true;
+ return _bufferList.Remove(buffer.StartLine);
+ }
+
+ ///
+ /// Atomically updates a buffer's start line in both the index and LRU cache. Used by ShiftBuffers during rollover.
+ /// Caller must hold a write lock.
+ ///
+ public void UpdateStartLine (LogBuffer buffer, int newStartLine)
+ {
+ var hadCache = _lruCacheDict.TryRemove(buffer.StartLine, out var cacheEntry);
+
+ _ = _bufferList.Remove(buffer.StartLine);
+ buffer.StartLine = newStartLine;
+ _bufferList[newStartLine] = buffer;
+
+ if (hadCache)
+ {
+ _ = _lruCacheDict.TryAdd(buffer.StartLine, cacheEntry);
+ }
+
+ _isLineCountDirty = true;
+ }
+
+ ///
+ /// Clears all buffers and LRU entries. Does NOT dispose buffer content. Caller must hold a write lock.
+ ///
+ public void Clear ()
+ {
+ _bufferList.Clear();
+ _lruCacheDict.Clear();
+ ResetThreadLocalCache();
+ _isLineCountDirty = true;
+ }
+
+ #endregion
+
+ #region LRU eviction
+
+ ///
+ /// Removes least-recently-used entries when cache exceeds max size. Evicts content but preserves metadata so
+ /// buffers remain findable for re-read. Does NOT acquire _lock — only touches _lruCache (ConcurrentDictionary) and
+ /// individual buffer SpinLocks.
+ ///
+ public void EvictLeastRecentlyUsed ()
+ {
+#if DEBUG
+ long startTime = Environment.TickCount;
+#endif
+ _logger.Debug(CultureInfo.InvariantCulture, "Starting garbage collection");
+ var threshold = 10;
+
+ if (_lruCacheDict.Count - (_maxBuffers + threshold) > 0)
+ {
+ var diff = _lruCacheDict.Count - _maxBuffers;
+#if DEBUG
+ if (diff > 0)
+ {
+ _logger.Info(CultureInfo.InvariantCulture, "Removing {0} entries from LRU cache", diff);
+ }
+#endif
+ // Snapshot values and sort by timestamp (ascending = least recently used first)
+ var entries = _lruCacheDict.ToArray();
+ Array.Sort(entries, static (a, b) => a.Value.LastUseTimeStamp.CompareTo(b.Value.LastUseTimeStamp));
+
+ for (var i = 0; i < diff && i < entries.Length; ++i)
+ {
+ var kvp = entries[i];
+ if (_lruCacheDict.TryRemove(kvp.Key, out var removed))
+ {
+ // Skip pinned buffers — the UI is actively displaying their content.
+ // Re-add to LRU so they'll be reconsidered in a future eviction pass.
+ if (removed.LogBuffer.IsPinned)
+ {
+ _lruCacheDict.TryAdd(kvp.Key, removed);
+ continue;
+ }
+
+ var lockTaken = false;
+ try
+ {
+ removed.LogBuffer.AcquireContentLock(ref lockTaken);
+ // Evict content but preserve metadata (LineCount, StartLine, etc.)
+ // so the buffer remains findable in _bufferList lookups.
+ // Do NOT return to pool — the buffer is still referenced by _bufferList.
+ removed.LogBuffer.EvictContent();
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ removed.LogBuffer.ReleaseContentLock();
+ }
+ }
+ }
+ }
+ }
+
+#if DEBUG
+ if (_lruCacheDict.Count - (_maxBuffers + threshold) > 0)
+ {
+ long endTime = Environment.TickCount;
+ _logger.Info(CultureInfo.InvariantCulture, "Garbage collector time: " + (endTime - startTime) + " ms.");
+ }
+#endif
+ }
+
+ ///
+ /// Pins all buffers that cover the specified line range. Returns a
+ /// that unpins them on dispose. Caller must hold at least a read lock.
+ ///
+ public PinHandle PinRange (int startLine, int endLine)
+ {
+ var pinned = new List<LogBuffer>();
+ var line = startLine;
+
+ while (line <= endLine)
+ {
+ var entry = TryFindBuffer(line);
+ if (!entry.Found || entry.Buffer is null)
+ {
+ break;
+ }
+
+ entry.Buffer.Pin();
+ pinned.Add(entry.Buffer);
+
+ // Jump to next buffer's start line to avoid redundant lookups
+ line = entry.Buffer.StartLine + entry.Buffer.LineCount;
+ }
+
+ return new PinHandle(pinned);
+ }
+
+ ///
+ /// Atomically clears the index and returns all LRU-tracked buffers to the pool. Clears the index FIRST under the
+ /// caller's write lock, THEN returns buffers to pool. This prevents a race where concurrent readers find buffers
+ /// that have been returned to the pool. Caller must hold a write lock.
+ ///
+ public void ClearLru (LogBufferPool pool)
+ {
+ _logger.Info(CultureInfo.InvariantCulture, "Clearing LRU cache.");
+
+ // 1. Collect buffer references before clearing
+ var toReturn = new List<LogBuffer>(_lruCacheDict.Count);
+ foreach (var entry in _lruCacheDict.Values)
+ {
+ toReturn.Add(entry.LogBuffer);
+ }
+
+ // 2. Clear index FIRST — no concurrent reader can find these after this
+ _bufferList.Clear();
+ _lruCacheDict.Clear();
+ _isLineCountDirty = true;
+ ResetThreadLocalCache();
+
+ // 3. Now safe to return to pool
+ foreach (var entry in toReturn)
+ {
+ var lockTaken = false;
+ try
+ {
+ entry.AcquireContentLock(ref lockTaken);
+ pool.Return(entry);
+ }
+ finally
+ {
+ if (lockTaken)
+ {
+ entry.ReleaseContentLock();
+ }
+ }
+ }
+
+ _logger.Info(CultureInfo.InvariantCulture, "Clearing done.");
+ }
+
+ #endregion
+
+ ///
+ /// Gets the number of buffers.
+ ///
+ public int BufferCount => _bufferList.Count;
+
+ ///
+ /// Returns the buffer at the specified positional index. Caller must hold at least a read lock.
+ ///
+ public LogBuffer GetBufferAt (int index) => _bufferList.GetValueAtIndex(index);
+
+ ///
+ /// Returns the last buffer in the index (highest start line). Caller must hold at least a read lock.
+ ///
+ public LogBuffer GetLastBuffer () => _bufferList.GetValueAtIndex(_bufferList.Count - 1);
+
+ ///
+ /// Returns an enumerable collection of all log buffers managed by the current instance.
+ ///
+ ///
+ /// An containing each in the collection. The
+ /// enumeration reflects the current state of the buffers at the time of the call.
+ ///
+ public IEnumerable<LogBuffer> EnumerateBuffers () { return [.. _bufferList.Values]; }
+
+ ///
+ /// Total lines across all buffers. Recalculated on demand when dirty. Caller must hold at least a read lock.
+ ///
+ public int TotalLineCount
+ {
+ get
+ {
+ if (_isLineCountDirty)
+ {
+ var total = 0;
+ foreach (var buffer in _bufferList.Values)
+ {
+ total += buffer.LineCount;
+ }
+
+ _cachedLineCount = total;
+ _isLineCountDirty = false;
+ }
+
+ return _cachedLineCount;
+ }
+ }
+
+ public void MarkLineCountDirty () => _isLineCountDirty = true;
+
+ ///
+ /// Gets the number of items currently stored in the least recently used (LRU) cache.
+ ///
+ public int LruCacheCount => _lruCacheDict.Count;
+
+ #region Lock management — using-scoped only
+
+ public ReadLockScope AcquireReadLock () => new(_lock);
+
+ public WriteLockScope AcquireWriteLock () => new(_lock);
+
+ public UpgradeableReadLockScope AcquireUpgradeableReadLock () => new(_lock);
+
+ #endregion
+
+ #region Diagnostics
+
+ ///
+ /// Creates an immutable point-in-time capture of the index state. Acquires its own read lock internally.
+ ///
+ public BufferIndexSnapshot CreateSnapshot ()
+ {
+ using var _ = AcquireReadLock();
+
+ var buffers = new List<BufferIndexSnapshot.BufferInfo>(_bufferList.Count);
+
+ foreach (var b in _bufferList.Values)
+ {
+ buffers.Add(new BufferIndexSnapshot.BufferInfo
+ (
+ b.StartLine,
+ b.LineCount,
+ b.StartPos,
+ b.Size,
+ b.IsDisposed,
+ b.FileInfo.FullName
+ ));
+ }
+
+ return new BufferIndexSnapshot
+ {
+ BufferCount = _bufferList.Count,
+ TotalLineCount = TotalLineCount,
+ LruCacheCount = _lruCacheDict.Count,
+ Buffers = buffers
+ };
+ }
+
+ #endregion
+
+ #region Internal Helpers
+
+ public void ResetThreadLocalCache () => _lastBufferIndex.Value = -1;
+
+ private void UpdateLru (LogBuffer logBuffer)
+ {
+ var cacheEntry = _lruCacheDict.GetOrAdd(
+ logBuffer.StartLine,
+ static (_, buf) => new LogBufferCacheEntry { LogBuffer = buf },
+ logBuffer);
+
+ cacheEntry.Touch();
+ }
+
+ private static int HighestPowerOfTwo (int n) => 1 << (31 - int.LeadingZeroCount(n));
+
+ public void Dispose ()
+ {
+ _lastBufferIndex.Dispose();
+ _lock.Dispose();
+ }
+
+ #endregion
+}
+
+#region Lock scope structs
+
+public readonly ref struct ReadLockScope
+{
+ private readonly ReaderWriterLockSlim _lock;
+
+ public ReadLockScope (ReaderWriterLockSlim rwLock)
+ {
+ _lock = rwLock;
+ if (!_lock.TryEnterReadLock(TimeSpan.FromSeconds(10)))
+ {
+ //_logger.Warn("Reader lock wait timed out, forcing entry");
+ _lock.EnterReadLock();
+ }
+ }
+
+ public void Dispose () => _lock.ExitReadLock();
+
+}
+
+public readonly ref struct WriteLockScope
+{
+ private readonly ReaderWriterLockSlim _lock;
+
+ public WriteLockScope (ReaderWriterLockSlim rwLock)
+ {
+ _lock = rwLock;
+ if (!_lock.TryEnterWriteLock(TimeSpan.FromSeconds(10)))
+ {
+ //_logger.Warn("Writer lock wait timed out, forcing entry");
+ _lock.EnterWriteLock();
+ }
+ }
+
+ public void Dispose () => _lock.ExitWriteLock();
+}
+
+public readonly ref struct UpgradeableReadLockScope
+{
+ private readonly ReaderWriterLockSlim _lock;
+
+ public UpgradeableReadLockScope (ReaderWriterLockSlim rwLock)
+ {
+ _lock = rwLock;
+ if (!_lock.TryEnterUpgradeableReadLock(TimeSpan.FromSeconds(10)))
+ {
+ //_logger.Warn("Upgradeable read lock timed out, forcing entry");
+ _lock.EnterUpgradeableReadLock();
+ }
+ }
+
+ public WriteLockUpgradeScope UpgradeToWrite () => new(_lock);
+
+ public void Dispose () => _lock.ExitUpgradeableReadLock();
+}
+
+public readonly ref struct WriteLockUpgradeScope
+{
+ private readonly ReaderWriterLockSlim _lock;
+
+ public WriteLockUpgradeScope (ReaderWriterLockSlim rwls)
+ {
+ _lock = rwls;
+ if (!_lock.TryEnterWriteLock(TimeSpan.FromSeconds(10)))
+ {
+ //_logger.Warn("Writer lock upgrade timed out, forcing entry");
+ _lock.EnterWriteLock();
+ }
+ }
+
+ public void Dispose () => _lock.ExitWriteLock();
+}
+
+#endregion
+
+public readonly struct LogBufferEntry (LogBuffer? buffer, int index, bool found)
+{
+ public LogBuffer? Buffer { get; } = buffer;
+
+ public int Index { get; } = index;
+
+ public bool Found { get; } = found;
+}
\ No newline at end of file
diff --git a/src/LogExpert.Core/Classes/Log/Buffers/BufferIndexSnapshot.cs b/src/LogExpert.Core/Classes/Log/Buffers/BufferIndexSnapshot.cs
new file mode 100644
index 00000000..5b54390b
--- /dev/null
+++ b/src/LogExpert.Core/Classes/Log/Buffers/BufferIndexSnapshot.cs
@@ -0,0 +1,24 @@
+namespace LogExpert.Core.Classes.Log.Buffers;
+
+/// <summary>
+/// Immutable point-in-time capture of <see cref="BufferIndex"/> state.
+/// Taken under a single read lock, safe to inspect afterward without locks.
+/// </summary>
+public sealed class BufferIndexSnapshot
+{
+ public int BufferCount { get; init; }
+ public int TotalLineCount { get; init; }
+ public int LruCacheCount { get; init; }
+ public IReadOnlyList<BufferInfo> Buffers { get; init; } = [];
+
+ public sealed record BufferInfo (
+ int StartLine,
+ int LineCount,
+ long StartPos,
+ long Size,
+ bool IsDisposed,
+ string FileName);
+
+ public override string ToString () =>
+ $"Buffers={BufferCount}, Lines={TotalLineCount}, LRU={LruCacheCount}";
+}
\ No newline at end of file
diff --git a/src/LogExpert.Core/Classes/Log/Buffers/CharBlockAllocator.cs b/src/LogExpert.Core/Classes/Log/Buffers/CharBlockAllocator.cs
new file mode 100644
index 00000000..50061f94
--- /dev/null
+++ b/src/LogExpert.Core/Classes/Log/Buffers/CharBlockAllocator.cs
@@ -0,0 +1,136 @@
+using System.Buffers;
+
+namespace LogExpert.Core.Classes.Log.Buffers;
+
+/// <summary>
+/// Blocks are rented from <see cref="System.Buffers.ArrayPool{T}"/> and returned when the owning <see cref="LogBuffer"/> is
+/// evicted. The pinning mechanism (Phase 1/2) ensures that buffers whose content is still displayed by the UI are
+/// exempt from eviction, preventing use-after-return corruption.
+/// This class is NOT thread-safe. Each reader/fill operation should use its own instance.
+/// </summary>
+public sealed class CharBlockAllocator : IDisposable
+{
+ private const int DEFAULT_BLOCK_SIZE = 32_768; // 64 KB (32K chars × 2 bytes), stays under 85 KB LOH threshold
+
+ private readonly int _blockSize;
+ private List<char[]> _blocks = [];
+ private readonly List<char[]> _oversizedBlocks = [];
+ private char[] _currentBlock;
+ private int _currentOffset;
+ private bool _disposed;
+
+ public CharBlockAllocator (int blockSize = DEFAULT_BLOCK_SIZE)
+ {
+ _blockSize = blockSize;
+ _currentBlock = ArrayPool<char>.Shared.Rent(_blockSize);
+ _blocks.Add(_currentBlock);
+ _currentOffset = 0;
+ }
+
+ ///
+ /// Gets the number of normal (fixed-size) blocks currently rented from the pool.
+ ///
+ public int BlockCount => _blocks.Count;
+
+ ///
+ /// Gets the number of oversized (standalone) blocks currently rented from the pool.
+ /// Useful for diagnostics — a high count indicates pathological line lengths.
+ ///
+ public int OversizedBlockCount => _oversizedBlocks.Count;
+
+ ///
+ /// Allocates a region of the specified length from the current block.
+ /// If the current block has insufficient space, a new block is rented.
+ /// Lines longer than the block size receive a standalone rental tracked separately.
+ ///
+ public Memory<char> Rent (int length)
+ {
+ ObjectDisposedException.ThrowIf(_disposed, this);
+
+ if (length <= 0)
+ {
+ return Memory<char>.Empty;
+ }
+
+ // Oversized line: give it its own array, tracked separately
+ if (length > _blockSize)
+ {
+ var oversized = new char[length];
+ _oversizedBlocks.Add(oversized);
+ return oversized.AsMemory(0, length);
+ }
+
+ // Current block has space
+ if (_currentOffset + length <= _currentBlock.Length)
+ {
+ var memory = _currentBlock.AsMemory(_currentOffset, length);
+ _currentOffset += length;
+ return memory;
+ }
+
+ // Need a new block
+ _currentBlock = ArrayPool<char>.Shared.Rent(_blockSize);
+ _blocks.Add(_currentBlock);
+ _currentOffset = length;
+ return _currentBlock.AsMemory(0, length);
+ }
+
+ ///
+ /// Detaches and returns the list of all blocks (normal + oversized). After this call,
+ /// the allocator no longer owns those blocks — the caller (LogBuffer) holds them
+ /// until GC collects them after all slices are released.
+ ///
+ public List<char[]> DetachBlocks ()
+ {
+ ObjectDisposedException.ThrowIf(_disposed, this);
+
+ // Merge oversized blocks into the main list so the caller owns everything
+ if (_oversizedBlocks.Count > 0)
+ {
+ _blocks.AddRange(_oversizedBlocks);
+ _oversizedBlocks.Clear();
+ }
+
+ // Swap the list — O(1), no copy. Caller owns the old list.
+ var blocks = _blocks;
+ _currentBlock = ArrayPool<char>.Shared.Rent(_blockSize);
+ _blocks = [_currentBlock];
+ _currentOffset = 0;
+ return blocks;
+ }
+
+ ///
+ /// Returns all blocks to .
+ /// Safe to call only when no slices reference these blocks
+ /// (i.e., after DetachBlocks transferred ownership to LogBuffer, and the reader is being disposed).
+ ///
+ public void ReturnAll ()
+ {
+ foreach (var block in _blocks)
+ {
+ ArrayPool<char>.Shared.Return(block);
+ }
+
+ _blocks.Clear();
+
+ foreach (var block in _oversizedBlocks)
+ {
+ ArrayPool<char>.Shared.Return(block);
+ }
+
+ _oversizedBlocks.Clear();
+ _currentBlock = null!;
+ _currentOffset = 0;
+ }
+
+ public void Dispose ()
+ {
+ if (_disposed)
+ {
+ return;
+ }
+
+ ReturnAll();
+ _disposed = true;
+ }
+}
\ No newline at end of file
diff --git a/src/LogExpert.Core/Classes/Log/Buffers/LogBuffer.cs b/src/LogExpert.Core/Classes/Log/Buffers/LogBuffer.cs
new file mode 100644
index 00000000..020752fb
--- /dev/null
+++ b/src/LogExpert.Core/Classes/Log/Buffers/LogBuffer.cs
@@ -0,0 +1,346 @@
+using System.Buffers;
+
+using ColumnizerLib;
+
+using NLog;
+
+namespace LogExpert.Core.Classes.Log.Buffers;
+
+public class LogBuffer
+{
+    #region Fields
+
+    private SpinLock _contentLock = new(enableThreadOwnerTracking: false);
+    private static readonly Logger _logger = LogManager.GetCurrentClassLogger();
+
+#if DEBUG
+    private readonly List<long> _filePositions; // file position for every line
+#endif
+
+    private LogLine[]? _lineArray;
+    private int _lineArrayLength; // capacity of the rented array
+    private List<char[]>? _charBlocks;
+
+    private int _pinCount;
+
+    private int MAX_LINES = 500;
+
+    #endregion
+
+    #region cTor
+
+    // Don't use a primary constructor here: field initializers (like MAX_LINES) run before primary constructor parameters are assigned,
+    // so MAX_LINES would always be set to its default value before the constructor body can assign it. Use a regular constructor instead.
+    public LogBuffer (ILogFileInfo fileInfo, int maxLines)
+    {
+        FileInfo = fileInfo;
+        MAX_LINES = maxLines;
+        _lineArray = ArrayPool<LogLine>.Shared.Rent(maxLines);
+        _lineArrayLength = _lineArray.Length;
+#if DEBUG
+        _filePositions = new(MAX_LINES);
+#endif
+    }
+
+    #endregion
+
+    #region Properties
+
+    /// <summary>
+    /// Returns true if any component has pinned this buffer to prevent eviction.
+    /// </summary>
+    public bool IsPinned => Volatile.Read(ref _pinCount) > 0;
+
+    public long StartPos { set; get; }
+
+    public long Size
+    {
+        set
+        {
+            field = value;
+#if DEBUG
+            if (_filePositions.Count > 0)
+            {
+                if (field < _filePositions[^1] - StartPos)
+                {
+                    _logger.Error("### LogBuffer: LogBuffer overall Size must be greater than last line file position!");
+                }
+            }
+#endif
+        }
+        get;
+    }
+
+    public int EndLine => StartLine + LineCount;
+
+    public int StartLine { set; get; }
+
+    public int LineCount { get; private set; }
+
+    public bool IsDisposed { get; private set; }
+
+    public ILogFileInfo FileInfo { get; set; }
+
+    public int DroppedLinesCount { get; set; }
+
+    public int PrevBuffersDroppedLinesSum { get; set; }
+
+    #endregion
+
+    #region Public methods
+
+    /// <summary>
+    /// Increments the pin count. While pinned, the buffer will not be evicted by the LRU garbage collector. Each call
+    /// to Pin() must be balanced by a call to Unpin().
+    /// </summary>
+    public void Pin ()
+    {
+        _ = Interlocked.Increment(ref _pinCount);
+    }
+
+    /// <summary>
+    /// Decrements the pin count. When the count reaches zero, the buffer becomes eligible for eviction.
+    /// </summary>
+    public void Unpin ()
+    {
+#if DEBUG
+        var newCount = Interlocked.Decrement(ref _pinCount);
+        if (newCount < 0)
+        {
+            _logger.Warn("Unpin underflow: _pinCount went to {0}. Unbalanced Pin/Unpin calls.", newCount);
+        }
+#else
+        Interlocked.Decrement(ref _pinCount);
+#endif
+    }
+
+    /// <summary>
+    /// Adds a log line to the internal collection at the specified file position.
+    /// </summary>
+    /// <remarks>
+    /// If the internal collection has reached its maximum capacity, the log line is not added. In debug builds, an
+    /// error is logged when this occurs.
+    /// </remarks>
+    /// <param name="lineMemory">The log line to add to the collection.</param>
+    /// <param name="filePos">The file position associated with the log line.</param>
+    public void AddLine (LogLine lineMemory, long filePos)
+    {
+        if (LineCount < _lineArrayLength)
+        {
+            _lineArray![LineCount] = lineMemory;
+            LineCount++;
+        }
+#if DEBUG
+        else
+        {
+            _logger.Error("AddLine overflow: LineCount={0} >= _lineArrayLength={1}", LineCount, _lineArrayLength);
+        }
+#endif
+
+#if DEBUG
+        _filePositions.Add(filePos);
+#endif
+        IsDisposed = false;
+    }
+
+    /// <summary>
+    /// Removes all log lines from the current collection, resetting its state for reuse.
+    /// </summary>
+    /// <remarks>
+    /// After calling this method, the collection will be empty and ready to accept new log lines. Any resources
+    /// associated with the previous log lines are released. This method is typically used to clear the log data before
+    /// loading new content or starting a new logging session.
+    /// </remarks>
+    public void ClearLines ()
+    {
+        if (_lineArray == null)
+        {
+            _lineArray = ArrayPool<LogLine>.Shared.Rent(MAX_LINES);
+            _lineArrayLength = _lineArray.Length;
+        }
+        else
+        {
+            Array.Clear(_lineArray, 0, LineCount);
+        }
+
+        ReturnCharBlocks();
+
+        LineCount = 0;
+#if DEBUG
+        _filePositions.Clear();
+#endif
+    }
+
+    /// <summary>
+    /// Prepares the buffer for reuse from the pool.
+    /// </summary>
+    /// <param name="fileInfo">The file this buffer will hold lines for.</param>
+    /// <param name="maxLines">Capacity hint for the rented line array.</param>
+    public void Reinitialise (ILogFileInfo fileInfo, int maxLines)
+    {
+        ReturnCharBlocks();
+
+        FileInfo = fileInfo;
+        MAX_LINES = maxLines;
+        StartLine = 0;
+        StartPos = 0;
+        Size = 0;
+        LineCount = 0;
+        DroppedLinesCount = 0;
+        PrevBuffersDroppedLinesSum = 0;
+        IsDisposed = false;
+        _pinCount = 0;
+        _lineArray = ArrayPool<LogLine>.Shared.Rent(maxLines);
+        _lineArrayLength = _lineArray.Length;
+#if DEBUG
+        _filePositions.Clear();
+        DisposeCount = 0;
+#endif
+    }
+
+    /// <summary>
+    /// Evicts the buffer content to free memory while preserving metadata (LineCount, StartLine, StartPos, Size). The
+    /// buffer remains findable in buffer list lookups and can be re-read from disk when accessed.
+    /// </summary>
+    public void EvictContent ()
+    {
+        if (_lineArray != null)
+        {
+            Array.Clear(_lineArray, 0, LineCount);
+            ArrayPool<LogLine>.Shared.Return(_lineArray);
+            _lineArray = null;
+        }
+
+        ReturnCharBlocks();
+
+        //! Do NOT zero LineCount — it is needed for buffer lookup in GetBufferForLineWithIndex.
+        //! Do NOT zero StartLine, StartPos, Size — they are needed for re-reading from disk.
+        IsDisposed = true;
+#if DEBUG
+        DisposeCount++;
+#endif
+    }
+
+    /// <summary>
+    /// Fully disposes the buffer content and resets all metadata. Used when the buffer is being returned to the pool or
+    /// completely removed from the buffer list.
+    /// </summary>
+    public void DisposeContent ()
+    {
+        if (_lineArray != null)
+        {
+            Array.Clear(_lineArray, 0, LineCount);
+            ArrayPool<LogLine>.Shared.Return(_lineArray);
+            _lineArray = null;
+            LineCount = 0;
+        }
+
+        ReturnCharBlocks();
+
+        IsDisposed = true;
+#if DEBUG
+        DisposeCount++;
+#endif
+    }
+
+    /// <summary>
+    /// Retrieves the log line at the specified index within the current memory block.
+    /// </summary>
+    /// <param name="num">
+    /// The zero-based index of the log line to retrieve. Must be greater than or equal to 0 and less than the total
+    /// number of lines.
+    /// </param>
+    /// <returns>
+    /// The <see cref="LogLine"/> at the specified index if it exists; otherwise, <see langword="null"/>.
+    /// </returns>
+    public LogLine? GetLineMemoryOfBlock (int num)
+    {
+        return num < LineCount && num >= 0
+            ? _lineArray![num]
+            : null;
+    }
+
+    /// <summary>
+    /// Acquires the content lock. The caller MUST call <see cref="ReleaseContentLock"/> in a finally block.
+    /// </summary>
+    /// <param name="lockTaken">Set to true when the lock was acquired; must be initialized to false by the caller.</param>
+    public void AcquireContentLock (ref bool lockTaken)
+    {
+        _contentLock.Enter(ref lockTaken);
+    }
+
+    /// <summary>
+    /// Releases the content lock previously acquired via <see cref="AcquireContentLock(ref bool)"/>.
+    /// </summary>
+    public void ReleaseContentLock ()
+    {
+        _contentLock.Exit(useMemoryBarrier: false);
+    }
+
+    /// <summary>
+    /// Attaches pooled char[] blocks that back the <see cref="ReadOnlyMemory{T}"/> in this buffer's LogLine entries. These blocks will
+    /// be returned to ArrayPool when the buffer is evicted or disposed. New blocks are MERGED with existing ones —
+    /// never replace — because the buffer's existing LogLine entries still reference the old blocks (e.g., during tail
+    /// mode where multiple read sessions append lines to the same buffer).
+    /// </summary>
+    /// <param name="blocks">The pooled blocks whose ownership transfers to this buffer.</param>
+    public void AttachCharBlocks (List<char[]> blocks)
+    {
+        if (_charBlocks is null)
+        {
+            _charBlocks = blocks;
+        }
+        else
+        {
+            _charBlocks.AddRange(blocks);
+        }
+    }
+
+    #endregion
+
+#if DEBUG
+
+    public long DisposeCount { get; private set; }
+
+    public long GetFilePosForLineOfBlock (int line)
+    {
+        return line >= 0 && line < _filePositions.Count
+            ? _filePositions[line]
+            : -1;
+    }
+
+#endif
+
+    #region Private Methods
+
+    /// <summary>
+    /// Releases references to the character block buffers used by this instance, allowing them to be garbage collected
+    /// when no longer in use.
+    /// </summary>
+    /// <remarks>
+    /// If the buffer is pinned, this method only drops the reference without returning the blocks to the array pool, as
+    /// external consumers may still hold references. This helps prevent premature reuse of buffers that may still be
+    /// accessed elsewhere.
+    /// </remarks>
+    private void ReturnCharBlocks ()
+    {
+        if (_charBlocks is null)
+        {
+            return;
+        }
+
+        if (IsPinned)
+        {
+            // Buffer is pinned — UI still holds ReadOnlyMemory slices into these blocks.
+            // Don't return to ArrayPool; just drop the reference. GC will collect them
+            // once all UI references (ColumnCache, DataGridView) are released.
+            _charBlocks = null;
+            return;
+        }
+
+        foreach (var block in _charBlocks)
+        {
+            ArrayPool<char>.Shared.Return(block);
+        }
+
+        _charBlocks = null;
+    }
+
+    #endregion
+}
\ No newline at end of file
diff --git a/src/LogExpert.Core/Classes/Log/LogBufferCacheEntry.cs b/src/LogExpert.Core/Classes/Log/Buffers/LogBufferCacheEntry.cs
similarity index 91%
rename from src/LogExpert.Core/Classes/Log/LogBufferCacheEntry.cs
rename to src/LogExpert.Core/Classes/Log/Buffers/LogBufferCacheEntry.cs
index b983cfed..7ce51e47 100644
--- a/src/LogExpert.Core/Classes/Log/LogBufferCacheEntry.cs
+++ b/src/LogExpert.Core/Classes/Log/Buffers/LogBufferCacheEntry.cs
@@ -1,4 +1,4 @@
-namespace LogExpert.Core.Classes.Log;
+namespace LogExpert.Core.Classes.Log.Buffers;
public class LogBufferCacheEntry
{
diff --git a/src/LogExpert.Core/Classes/Log/LogBufferPool.cs b/src/LogExpert.Core/Classes/Log/Buffers/LogBufferPool.cs
similarity index 95%
rename from src/LogExpert.Core/Classes/Log/LogBufferPool.cs
rename to src/LogExpert.Core/Classes/Log/Buffers/LogBufferPool.cs
index b7bbed59..fb825902 100644
--- a/src/LogExpert.Core/Classes/Log/LogBufferPool.cs
+++ b/src/LogExpert.Core/Classes/Log/Buffers/LogBufferPool.cs
@@ -2,7 +2,7 @@
using ColumnizerLib;
-namespace LogExpert.Core.Classes.Log;
+namespace LogExpert.Core.Classes.Log.Buffers;
public sealed class LogBufferPool (int maxSize)
{
diff --git a/src/LogExpert.Core/Classes/Log/Buffers/PinHandle.cs b/src/LogExpert.Core/Classes/Log/Buffers/PinHandle.cs
new file mode 100644
index 00000000..ac6d659b
--- /dev/null
+++ b/src/LogExpert.Core/Classes/Log/Buffers/PinHandle.cs
@@ -0,0 +1,34 @@
+namespace LogExpert.Core.Classes.Log.Buffers;
+
+/// <summary>
+/// Tracks a set of pinned <see cref="LogBuffer"/> instances. Disposing the handle
+/// unpins all held buffers, making them eligible for LRU eviction.
+/// </summary>
+public sealed class PinHandle : IDisposable
+{
+    private List<LogBuffer>? _pinnedBuffers;
+
+    internal PinHandle (List<LogBuffer> pinnedBuffers)
+    {
+        _pinnedBuffers = pinnedBuffers;
+    }
+
+    /// <summary>
+    /// Gets the number of buffers currently pinned by this handle.
+    /// </summary>
+    public int Count => _pinnedBuffers?.Count ?? 0;
+
+    public void Dispose ()
+    {
+        // Atomically take the list so double-dispose (or a concurrent dispose)
+        // unpins each buffer exactly once.
+        var buffers = Interlocked.Exchange(ref _pinnedBuffers, null);
+        if (buffers is null)
+        {
+            return;
+        }
+
+        foreach (var buffer in buffers)
+        {
+            buffer.Unpin();
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/LogExpert.Core/Classes/Log/CastingPipelineBuilder.cs b/src/LogExpert.Core/Classes/Log/CastingPipelineBuilder.cs
new file mode 100644
index 00000000..e0a92007
--- /dev/null
+++ b/src/LogExpert.Core/Classes/Log/CastingPipelineBuilder.cs
@@ -0,0 +1,67 @@
+using System.Collections.Concurrent;
+
+using LogExpert.Core.Interfaces;
+
+namespace LogExpert.Core.Classes.Log;
+
+public class CastingPipelineBuilder : IPipeline