diff --git a/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionApproach.cs b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionApproach.cs
new file mode 100644
index 0000000000..1b9a23cc55
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionApproach.cs
@@ -0,0 +1,39 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Diagnostics.CodeAnalysis;
using Microsoft.Shared.DiagnosticIds;

namespace Microsoft.Agents.AI.Compaction;

#pragma warning disable IDE0001 // Simplify Names for namespace in comments

/// <summary>
/// Describes the compaction approach used by a pre-configured <see cref="CompactionStrategy"/>.
/// </summary>
/// <seealso cref="CompactionStrategy.Create(CompactionApproach, CompactionSize, Microsoft.Extensions.AI.IChatClient?)"/>
[Experimental(DiagnosticIds.Experiments.AgentsAIExperiments)]
public enum CompactionApproach
{
    /// <summary>
    /// Applies the lightest available compaction techniques.
    /// Collapses old tool call groups into concise summaries and uses truncation as an emergency backstop.
    /// Does not require a summarization <see cref="Microsoft.Extensions.AI.IChatClient"/>.
    /// </summary>
    Gentle,

    /// <summary>
    /// Balances context preservation with compaction efficiency.
    /// Applies tool result collapsing, LLM-based summarization, and truncation as an emergency backstop.
    /// Requires a summarization <see cref="Microsoft.Extensions.AI.IChatClient"/>.
    /// </summary>
    Balanced,

    /// <summary>
    /// Applies the most aggressive available compaction techniques.
    /// Applies tool result collapsing, LLM-based summarization, turn-based sliding window, and truncation.
    /// Requires a summarization <see cref="Microsoft.Extensions.AI.IChatClient"/>.
    /// </summary>
    Aggressive,
}

#pragma warning restore IDE0001 // Simplify Names
diff --git a/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionSize.cs b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionSize.cs
new file mode 100644
index 0000000000..427ff6d3db
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionSize.cs
@@ -0,0 +1,38 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Diagnostics.CodeAnalysis;
using Microsoft.Shared.DiagnosticIds;

namespace Microsoft.Agents.AI.Compaction;

/// <summary>
/// Describes the context-size profile used by a pre-configured <see cref="CompactionStrategy"/>.
/// </summary>
/// <remarks>
/// The size profile controls the token and message thresholds at which compaction triggers.
/// Choose a size that matches the input token limit of your model:
/// <see cref="Compact"/> for smaller context windows, <see cref="Moderate"/> for common mid-range models,
/// and <see cref="Generous"/> for models with large context windows.
/// </remarks>
/// <seealso cref="CompactionStrategy.Create(CompactionApproach, CompactionSize, Microsoft.Extensions.AI.IChatClient?)"/>
[Experimental(DiagnosticIds.Experiments.AgentsAIExperiments)]
public enum CompactionSize
{
    /// <summary>
    /// Maintains a smaller context window.
    /// Compaction triggers earlier and keeps less history in context.
    /// </summary>
    Compact,

    /// <summary>
    /// Maintains a medium-sized context window.
    /// This is a reasonable default for most common models.
    /// </summary>
    Moderate,

    /// <summary>
    /// Maintains a large context window.
    /// Compaction triggers later and retains more history in context.
    /// </summary>
    Generous,
}
diff --git a/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionStrategy.Factory.cs b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionStrategy.Factory.cs
new file mode 100644
index 0000000000..788cfd39ad
--- /dev/null
+++ b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionStrategy.Factory.cs
@@ -0,0 +1,157 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using Microsoft.Extensions.AI;
using Microsoft.Shared.Diagnostics;

namespace Microsoft.Agents.AI.Compaction;

/// <summary>
/// Base class for strategies that compact a <see cref="CompactionMessageIndex"/> to reduce context size.
/// </summary>
/// <remarks>
/// <para>
/// Compaction strategies operate on <see cref="CompactionMessageIndex"/> instances, which organize messages
/// into atomic groups that respect the tool-call/result pairing constraint. Strategies mutate the collection
/// in place by marking groups as excluded, removing groups, or replacing message content (e.g., with summaries).
/// </para>
/// <para>
/// Every strategy requires a trigger condition that determines whether compaction should
/// proceed based on current metrics (token count, message count, turn count, etc.).
/// The base class evaluates this trigger at the start of <see cref="CompactAsync"/> and skips compaction when
/// the trigger returns <see langword="false"/>.
/// </para>
/// <para>
/// An optional target condition controls when compaction stops. Strategies incrementally exclude
/// groups and re-evaluate the target after each exclusion, stopping as soon as the target returns
/// <see langword="true"/>. When no target is specified, it defaults to the inverse of the trigger —
/// meaning compaction stops when the trigger condition would no longer fire.
/// </para>
/// <para>
/// Strategies can be applied at three lifecycle points:
/// <list type="bullet">
/// <item><description>In-run: During the tool loop, before each LLM call, to keep context within token limits.</description></item>
/// <item><description>Pre-write: Before persisting messages to storage.</description></item>
/// <item><description>On existing storage: As a maintenance operation to compact stored history.</description></item>
/// </list>
/// </para>
/// <para>
/// Multiple strategies can be composed by applying them sequentially to the same
/// <see cref="CompactionMessageIndex"/> via <see cref="PipelineCompactionStrategy"/>.
/// </para>
/// </remarks>
public abstract partial class CompactionStrategy
{
    /// <summary>
    /// Creates a pre-configured <see cref="CompactionStrategy"/> from a combination of
    /// <see cref="CompactionApproach"/> and <see cref="CompactionSize"/>.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The <see cref="CompactionApproach"/> controls which strategies are included in the pipeline:
    /// <list type="bullet">
    /// <item><description><see cref="CompactionApproach.Gentle"/>: tool result collapsing + truncation backstop. No <see cref="IChatClient"/> required.</description></item>
    /// <item><description><see cref="CompactionApproach.Balanced"/>: tool result collapsing + LLM summarization + truncation backstop.</description></item>
    /// <item><description><see cref="CompactionApproach.Aggressive"/>: tool result collapsing + LLM summarization + sliding window + truncation backstop.</description></item>
    /// </list>
    /// </para>
    /// <para>
    /// The <see cref="CompactionSize"/> controls the token and message thresholds at which each stage triggers.
    /// Choose a size that matches the input token limit of your model.
    /// </para>
    /// </remarks>
    /// <param name="approach">
    /// The compaction approach that controls which strategy or pipeline to use.
    /// <see cref="CompactionApproach.Gentle"/> does not require a <see cref="IChatClient"/>;
    /// <see cref="CompactionApproach.Balanced"/> and <see cref="CompactionApproach.Aggressive"/> require one.
    /// </param>
    /// <param name="size">
    /// The context-size profile that controls token and message thresholds.
    /// </param>
    /// <param name="chatClient">
    /// The <see cref="IChatClient"/> used for LLM-based summarization.
    /// Required when <paramref name="approach"/> is <see cref="CompactionApproach.Balanced"/> or
    /// <see cref="CompactionApproach.Aggressive"/>; ignored for <see cref="CompactionApproach.Gentle"/>.
    /// </param>
    /// <returns>A <see cref="CompactionStrategy"/> configured for the specified approach and size.</returns>
    /// <exception cref="ArgumentNullException">
    /// <paramref name="chatClient"/> is <see langword="null"/> and <paramref name="approach"/> requires one.
    /// </exception>
    /// <exception cref="ArgumentOutOfRangeException">
    /// <paramref name="approach"/> or <paramref name="size"/> is not a defined enum value.
    /// </exception>
    public static CompactionStrategy Create(CompactionApproach approach, CompactionSize size, IChatClient? chatClient = null)
    {
        // Validate the chat client up front so the caller gets an ArgumentNullException
        // (rather than a NullReferenceException deep inside pipeline construction).
        if (approach is CompactionApproach.Balanced or CompactionApproach.Aggressive)
        {
            _ = Throw.IfNull(chatClient);
        }

        int tokenLimit = GetTokenLimit(size);
        int messageLimit = GetMessageLimit(size);

        return approach switch
        {
            CompactionApproach.Gentle => CreateGentlePipeline(tokenLimit, messageLimit),
            CompactionApproach.Balanced => CreateBalancedPipeline(tokenLimit, messageLimit, chatClient!),
            CompactionApproach.Aggressive => CreateAggressivePipeline(tokenLimit, messageLimit, GetTurnLimit(size), chatClient!),
            _ => throw new ArgumentOutOfRangeException(nameof(approach), approach, "Unknown compaction approach."),
        };
    }

    /// <summary>Maps a <see cref="CompactionSize"/> to its token-count ceiling.</summary>
    private static int GetTokenLimit(CompactionSize size) => size switch
    {
        // Written as multiples of 1024 so the values match the "k" the names imply
        // (the previous hex masks 0x1FFF/0x7FFF/0xFFFF were each one short of 8k/32k/64k).
        CompactionSize.Compact => 8 * 1024, // 8k
        CompactionSize.Moderate => 32 * 1024, // 32k
        CompactionSize.Generous => 64 * 1024, // 64k
        _ => throw new ArgumentOutOfRangeException(nameof(size), size, "Unknown compaction size."),
    };

    /// <summary>Maps a <see cref="CompactionSize"/> to its message-count ceiling.</summary>
    private static int GetMessageLimit(CompactionSize size) => size switch
    {
        CompactionSize.Compact => 50,
        CompactionSize.Moderate => 500,
        CompactionSize.Generous => 1000,
        _ => throw new ArgumentOutOfRangeException(nameof(size), size, "Unknown compaction size."),
    };

    /// <summary>Maps a <see cref="CompactionSize"/> to its turn-count ceiling (Aggressive only).</summary>
    private static int GetTurnLimit(CompactionSize size) => size switch
    {
        CompactionSize.Compact => 25,
        CompactionSize.Moderate => 250,
        CompactionSize.Generous => 500,
        _ => throw new ArgumentOutOfRangeException(nameof(size), size, "Unknown compaction size."),
    };

    /// <summary>Builds the two-stage Gentle pipeline: tool-result collapsing + truncation backstop.</summary>
    private static PipelineCompactionStrategy CreateGentlePipeline(int tokenLimit, int messageLimit) =>
        new(
            new ToolResultCompactionStrategy(CompactionTriggers.MessagesExceed(messageLimit)),
            new TruncationCompactionStrategy(CompactionTriggers.TokensExceed(tokenLimit)));

    /// <summary>Builds the three-stage Balanced pipeline: collapsing + summarization + truncation backstop.</summary>
    private static PipelineCompactionStrategy CreateBalancedPipeline(int tokenLimit, int messageLimit, IChatClient chatClient)
    {
        // Early stages trigger at two-thirds of the limit so the pipeline has room to compact
        // incrementally before reaching the emergency truncation backstop at the full limit.
        int earlyMessageTrigger = messageLimit * 2 / 3;
        int earlyTokenTrigger = tokenLimit * 2 / 3;

        return new(
            new ToolResultCompactionStrategy(CompactionTriggers.MessagesExceed(earlyMessageTrigger)),
            new SummarizationCompactionStrategy(chatClient, CompactionTriggers.TokensExceed(earlyTokenTrigger)),
            new TruncationCompactionStrategy(CompactionTriggers.TokensExceed(tokenLimit)));
    }

    /// <summary>Builds the four-stage Aggressive pipeline: collapsing + summarization + sliding window + truncation backstop.</summary>
    private static PipelineCompactionStrategy CreateAggressivePipeline(int tokenLimit, int messageLimit, int turnLimit, IChatClient chatClient)
    {
        // Early stages trigger at half the limit so compaction kicks in sooner and
        // the sliding window and truncation backstop are reached less often.
        int earlyMessageTrigger = messageLimit / 2;
        int earlyTokenTrigger = tokenLimit / 2;

        return new(
            new ToolResultCompactionStrategy(CompactionTriggers.MessagesExceed(earlyMessageTrigger)),
            new SummarizationCompactionStrategy(chatClient, CompactionTriggers.TokensExceed(earlyTokenTrigger)),
            new SlidingWindowCompactionStrategy(CompactionTriggers.TurnsExceed(turnLimit)),
            new TruncationCompactionStrategy(CompactionTriggers.TokensExceed(tokenLimit)));
    }
}
diff --git a/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionStrategy.cs b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionStrategy.cs
index e6f7485438..eeaab3dfb3 100644
--- a/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionStrategy.cs
+++ b/dotnet/src/Microsoft.Agents.AI/Compaction/CompactionStrategy.cs
@@ -47,7 +47,7 @@ namespace Microsoft.Agents.AI.Compaction;
///
///
[Experimental(DiagnosticIds.Experiments.AgentsAIExperiments)]
-public abstract class CompactionStrategy
+public abstract partial class CompactionStrategy
{
///
/// Initializes a new instance of the class.
diff --git a/dotnet/tests/Microsoft.Agents.AI.UnitTests/Compaction/CompactionStrategyCreateTests.cs b/dotnet/tests/Microsoft.Agents.AI.UnitTests/Compaction/CompactionStrategyCreateTests.cs
new file mode 100644
index 0000000000..3ef88173e3
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.UnitTests/Compaction/CompactionStrategyCreateTests.cs
@@ -0,0 +1,234 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Agents.AI.Compaction;
using Microsoft.Extensions.AI;
using Moq;
using Xunit;

namespace Microsoft.Agents.AI.UnitTests.Compaction;

/// <summary>
/// Contains tests for <see cref="CompactionStrategy.Create(CompactionApproach, CompactionSize, IChatClient?)"/>.
/// </summary>
public class CompactionStrategyCreateTests
{
    /// <summary>Creates a stub <see cref="IChatClient"/> that returns a fixed "summary" response.</summary>
    private static IChatClient CreateMockChatClient()
    {
        Mock<IChatClient> mock = new();
        mock.Setup(c => c.GetResponseAsync(
                It.IsAny<IEnumerable<ChatMessage>>(),
                It.IsAny<ChatOptions>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync(new ChatResponse([new ChatMessage(ChatRole.Assistant, "summary")]));
        return mock.Object;
    }

    // ── Gentle ────────────────────────────────────────────────────────────────

    [Fact]
    public void CreateGentleCompactReturnsTwoStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Gentle, CompactionSize.Compact));

        Assert.Equal(2, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[1]);
    }

    [Fact]
    public void CreateGentleModerateReturnsTwoStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Gentle, CompactionSize.Moderate));

        Assert.Equal(2, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[1]);
    }

    [Fact]
    public void CreateGentleGenerousReturnsTwoStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Gentle, CompactionSize.Generous));

        Assert.Equal(2, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[1]);
    }

    [Fact]
    public void CreateGentleDoesNotRequireChatClient()
    {
        // No chatClient supplied — should succeed without throwing.
        CompactionStrategy strategy = CompactionStrategy.Create(CompactionApproach.Gentle, CompactionSize.Moderate);
        Assert.NotNull(strategy);
    }

    // ── Balanced ──────────────────────────────────────────────────────────────

    [Fact]
    public void CreateBalancedCompactReturnsThreeStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Balanced, CompactionSize.Compact, CreateMockChatClient()));

        Assert.Equal(3, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<SummarizationCompactionStrategy>(pipeline.Strategies[1]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[2]);
    }

    [Fact]
    public void CreateBalancedModerateReturnsThreeStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Balanced, CompactionSize.Moderate, CreateMockChatClient()));

        Assert.Equal(3, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<SummarizationCompactionStrategy>(pipeline.Strategies[1]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[2]);
    }

    [Fact]
    public void CreateBalancedGenerousReturnsThreeStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Balanced, CompactionSize.Generous, CreateMockChatClient()));

        Assert.Equal(3, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<SummarizationCompactionStrategy>(pipeline.Strategies[1]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[2]);
    }

    [Fact]
    public void CreateBalancedNullChatClientThrows()
    {
        Assert.Throws<ArgumentNullException>(
            () => CompactionStrategy.Create(CompactionApproach.Balanced, CompactionSize.Moderate, null));
    }

    // ── Aggressive ────────────────────────────────────────────────────────────

    [Fact]
    public void CreateAggressiveCompactReturnsFourStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Aggressive, CompactionSize.Compact, CreateMockChatClient()));

        Assert.Equal(4, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<SummarizationCompactionStrategy>(pipeline.Strategies[1]);
        Assert.IsType<SlidingWindowCompactionStrategy>(pipeline.Strategies[2]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[3]);
    }

    [Fact]
    public void CreateAggressiveModerateReturnsFourStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Aggressive, CompactionSize.Moderate, CreateMockChatClient()));

        Assert.Equal(4, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<SummarizationCompactionStrategy>(pipeline.Strategies[1]);
        Assert.IsType<SlidingWindowCompactionStrategy>(pipeline.Strategies[2]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[3]);
    }

    [Fact]
    public void CreateAggressiveGenerousReturnsFourStrategyPipeline()
    {
        PipelineCompactionStrategy pipeline =
            Assert.IsType<PipelineCompactionStrategy>(
                CompactionStrategy.Create(CompactionApproach.Aggressive, CompactionSize.Generous, CreateMockChatClient()));

        Assert.Equal(4, pipeline.Strategies.Count);
        Assert.IsType<ToolResultCompactionStrategy>(pipeline.Strategies[0]);
        Assert.IsType<SummarizationCompactionStrategy>(pipeline.Strategies[1]);
        Assert.IsType<SlidingWindowCompactionStrategy>(pipeline.Strategies[2]);
        Assert.IsType<TruncationCompactionStrategy>(pipeline.Strategies[3]);
    }

    [Fact]
    public void CreateAggressiveNullChatClientThrows()
    {
        Assert.Throws<ArgumentNullException>(
            () => CompactionStrategy.Create(CompactionApproach.Aggressive, CompactionSize.Moderate, null));
    }

    // ── Invalid enum values ───────────────────────────────────────────────────

    [Fact]
    public void CreateInvalidApproachThrows()
    {
        Assert.Throws<ArgumentOutOfRangeException>(
            () => CompactionStrategy.Create((CompactionApproach)99, CompactionSize.Moderate));
    }

    [Fact]
    public void CreateInvalidSizeThrows()
    {
        Assert.Throws<ArgumentOutOfRangeException>(
            () => CompactionStrategy.Create(CompactionApproach.Gentle, (CompactionSize)99));
    }

    // ── Size-threshold behavioral verification ────────────────────────────────

    /// <summary>
    /// Verifies that <see cref="CompactionSize.Compact"/> and
    /// <see cref="CompactionSize.Moderate"/> configure different message thresholds.
    /// </summary>
    [Fact]
    public async Task CreateGentleSizeDifferentiatesMessageThresholdsAsync()
    {
        // Arrange: 1 tool-call group + 99 (User, Assistant) groups = 100 groups / 200 messages.
        // Compact ToolResult triggers at MessagesExceed(50) → 200 > 50 → fires.
        // Moderate ToolResult triggers at MessagesExceed(500) → 200 < 500 → does not fire.
        //
        // The configuration preserves a number of most-recent groups, leaving the oldest
        // tool-call group eligible for collapsing, which makes the behavioral difference
        // between Compact and Moderate sizes observable in this test.
        CompactionStrategy compactPipeline = CompactionStrategy.Create(CompactionApproach.Gentle, CompactionSize.Compact);
        CompactionStrategy moderatePipeline = CompactionStrategy.Create(CompactionApproach.Gentle, CompactionSize.Moderate);

        List<ChatMessage> messages =
        [
            // 1 tool-call group (assistant FunctionCall + tool result = 2 messages, 1 group)
            new(ChatRole.Assistant, [new FunctionCallContent("c1", "fetch")]),
            new(ChatRole.Tool, "data"),
        ];

        for (int index = 0; index < 99; ++index)
        {
            messages.Add(new(ChatRole.User, $"Q{index}"));
            messages.Add(new(ChatRole.Assistant, $"A{index}"));
        }

        // Two separate indexes so strategies run independently.
        CompactionMessageIndex compactIndex = CompactionMessageIndex.Create(messages);
        CompactionMessageIndex moderateIndex = CompactionMessageIndex.Create(messages);

        // Act
        bool compactCompacted = await compactPipeline.CompactAsync(compactIndex);
        bool moderateCompacted = await moderatePipeline.CompactAsync(moderateIndex);

        // Assert
        Assert.True(compactCompacted, "Compact size should trigger ToolResult compaction.");
        Assert.False(moderateCompacted, "Moderate size should NOT trigger ToolResult compaction.");
    }
}