
Commit 5948dbc

.Net: Fix OllamaPromptExecutionSettings Clone() method to preserve Ollama-specific settings (#13218)
- [x] Analyze the repository structure and understand the issue
- [x] Review OpenAIPromptExecutionSettings implementation as reference
- [x] Build and test the current Ollama connector
- [x] Add Clone() method to OllamaPromptExecutionSettings
- [x] Add Freeze() method to OllamaPromptExecutionSettings
- [x] Change Stop property type from List<string> to IList<string> for consistency with OpenAI
- [x] Add comprehensive tests for Clone() method
- [x] Add test for Freeze() method
- [x] Add ServiceId property to Clone() method (code review feedback)
- [x] Add test to verify ServiceId is cloned correctly
- [x] Build and test to verify all changes work correctly

<details>
<summary>Original prompt</summary>

> ----
>
> *This section details the original issue you should resolve*
>
> <issue_title>.Net: Bug: OllamaPromptExecutionSettings Clone() method does not clone Ollama specific settings</issue_title>
> <issue_description>
> **Describe the bug**
> Calling `Clone()` on an `OllamaPromptExecutionSettings` instance returns a `PromptExecutionSettings` with the Temperature, TopK, TopP and MaxTokens settings lost.
>
> **To Reproduce**
> ```c#
> var testSettings = new OllamaPromptExecutionSettings { Temperature = (float)temperature, TopP = (float)topP };
>
> // Always throws InvalidCastException because the cloned object is of the base `PromptExecutionSettings` type.
> // Additionally, the cloned settings do not contain the temperature or topP values (nothing in ExtensionData either).
> var result = (OllamaPromptExecutionSettings) testSettings.Clone();
> ```
>
> **Expected behavior**
> The object is cloned correctly, as in the OpenAI provider.
>
> **Platform**
> - Language: C#
> - Source: 1.65.0
> - AI model: Ollama qwen3:32b
> - IDE: Visual Studio
> - OS: Windows
>
> **Additional context**
> I think it should work like `OpenAIPromptExecutionSettings`, where all information is properly cloned.
> </issue_description>
>
> <agent_instructions>Add the following methods to `OllamaPromptExecutionSettings.cs`:
> 1. `Clone`
> 2. `FromExecutionSettings`
>
> Use the implementations in `OpenAIPromptExecutionSettings.cs` as examples for how the methods should be implemented.</agent_instructions>
>
> ## Comments on the Issue (you are @copilot in this section)
>
> <comments>
> </comments>

</details>

Fixes #13217

---

💡 You can make Copilot smarter by setting up custom instructions, customizing its development environment and configuring Model Context Protocol (MCP) servers. Learn more about [Copilot coding agent tips](https://gh.io/copilot-coding-agent-tips) in the docs.

---------

Co-authored-by: copilot-swe-agent[bot] <[email protected]>
Co-authored-by: markwallace-microsoft <[email protected]>
1 parent a105e14 commit 5948dbc
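
With this change, `Clone()` on `OllamaPromptExecutionSettings` returns an `OllamaPromptExecutionSettings` instance, so the cast from the bug report succeeds and the Ollama-specific values are preserved. A minimal caller-side sketch of the fixed behavior (variable names are illustrative, and the `Microsoft.SemanticKernel.Connectors.Ollama` namespace is assumed for the settings type):

```c#
using System;
using System.Collections.Generic;
using Microsoft.SemanticKernel.Connectors.Ollama;

// Illustrative sketch only; not code from the repository.
var testSettings = new OllamaPromptExecutionSettings
{
    Temperature = 0.7f,
    TopP = 0.9f,
    TopK = 40,
    NumPredict = 256,
    Stop = new List<string> { "END" }
};

// Previously this cast threw InvalidCastException because Clone() returned a plain
// PromptExecutionSettings; it now returns an OllamaPromptExecutionSettings.
var result = (OllamaPromptExecutionSettings)testSettings.Clone();

// The Ollama-specific values survive the clone.
Console.WriteLine(result.Temperature); // 0.7
Console.WriteLine(result.TopP);        // 0.9
```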

File tree

3 files changed: +169 −2 lines changed


dotnet/src/Connectors/Connectors.Ollama.UnitTests/Settings/OllamaPromptExecutionSettingsTests.cs

Lines changed: 133 additions & 0 deletions
```diff
@@ -1,5 +1,7 @@
 // Copyright (c) Microsoft. All rights reserved.
 
+using System;
+using System.Collections.Generic;
 using System.Linq;
 using System.Text.Json;
 using Microsoft.SemanticKernel;
@@ -82,4 +84,135 @@ public void FromExecutionSettingsShouldRestoreFunctionChoiceBehavior()
         // Assert
         Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior);
     }
+
+    [Fact]
+    public void PromptExecutionSettingsCloneWorksAsExpected()
+    {
+        // Arrange
+        string configPayload = """
+            {
+                "temperature": 0.5,
+                "top_p": 0.9,
+                "top_k": 100,
+                "num_predict": 50,
+                "stop": ["stop me"]
+            }
+            """;
+        var executionSettings = JsonSerializer.Deserialize<OllamaPromptExecutionSettings>(configPayload);
+
+        // Act
+        var clone = executionSettings!.Clone();
+
+        // Assert
+        Assert.NotNull(clone);
+        Assert.IsType<OllamaPromptExecutionSettings>(clone);
+        var ollamaClone = (OllamaPromptExecutionSettings)clone;
+        Assert.Equal(executionSettings.ModelId, ollamaClone.ModelId);
+        Assert.Equal(executionSettings.Temperature, ollamaClone.Temperature);
+        Assert.Equal(executionSettings.TopP, ollamaClone.TopP);
+        Assert.Equal(executionSettings.TopK, ollamaClone.TopK);
+        Assert.Equal(executionSettings.NumPredict, ollamaClone.NumPredict);
+        Assert.Equal(executionSettings.Stop, ollamaClone.Stop);
+        Assert.Equivalent(executionSettings.ExtensionData, ollamaClone.ExtensionData);
+    }
+
+    [Fact]
+    public void ClonePreservesAllOllamaSpecificSettings()
+    {
+        // Arrange
+        var testSettings = new OllamaPromptExecutionSettings
+        {
+            Temperature = 0.7f,
+            TopP = 0.85f,
+            TopK = 50,
+            NumPredict = 100,
+            Stop = new List<string> { "END", "STOP" },
+            ModelId = "llama2"
+        };
+
+        // Act
+        var result = (OllamaPromptExecutionSettings)testSettings.Clone();
+
+        // Assert
+        Assert.NotNull(result);
+        Assert.NotSame(testSettings, result);
+        Assert.Equal(testSettings.Temperature, result.Temperature);
+        Assert.Equal(testSettings.TopP, result.TopP);
+        Assert.Equal(testSettings.TopK, result.TopK);
+        Assert.Equal(testSettings.NumPredict, result.NumPredict);
+        Assert.Equal(testSettings.ModelId, result.ModelId);
+        Assert.NotSame(testSettings.Stop, result.Stop);
+        Assert.Equal(testSettings.Stop, result.Stop);
+    }
+
+    [Fact]
+    public void CloneReturnsOllamaPromptExecutionSettingsType()
+    {
+        // This test verifies the exact issue from the bug report
+        // Arrange
+        var testSettings = new OllamaPromptExecutionSettings
+        {
+            Temperature = 0.7f,
+            TopP = 0.9f,
+            ServiceId = "test-service"
+        };
+
+        // Act
+        var cloned = testSettings.Clone();
+
+        // Assert - Should not throw InvalidCastException
+        var result = (OllamaPromptExecutionSettings)cloned;
+        Assert.NotNull(result);
+        Assert.Equal(testSettings.Temperature, result.Temperature);
+        Assert.Equal(testSettings.TopP, result.TopP);
+        Assert.Equal(testSettings.ServiceId, result.ServiceId);
+    }
+
+    [Fact]
+    public void ClonePreservesServiceId()
+    {
+        // Arrange
+        var testSettings = new OllamaPromptExecutionSettings
+        {
+            ServiceId = "my-ollama-service",
+            ModelId = "llama2",
+            Temperature = 0.8f
+        };
+
+        // Act
+        var cloned = (OllamaPromptExecutionSettings)testSettings.Clone();
+
+        // Assert
+        Assert.Equal(testSettings.ServiceId, cloned.ServiceId);
+        Assert.Equal(testSettings.ModelId, cloned.ModelId);
+        Assert.Equal(testSettings.Temperature, cloned.Temperature);
+    }
+
+    [Fact]
+    public void PromptExecutionSettingsFreezeWorksAsExpected()
+    {
+        // Arrange
+        var executionSettings = new OllamaPromptExecutionSettings
+        {
+            Temperature = 0.5f,
+            TopP = 0.9f,
+            TopK = 100,
+            NumPredict = 50,
+            Stop = new List<string> { "STOP" }
+        };
+
+        // Act
+        executionSettings.Freeze();
+
+        // Assert
+        Assert.True(executionSettings.IsFrozen);
+        Assert.Throws<InvalidOperationException>(() => executionSettings.Temperature = 1);
+        Assert.Throws<InvalidOperationException>(() => executionSettings.TopP = 1);
+        Assert.Throws<InvalidOperationException>(() => executionSettings.TopK = 1);
+        Assert.Throws<InvalidOperationException>(() => executionSettings.NumPredict = 1);
+        Assert.Throws<NotSupportedException>(() => executionSettings.Stop?.Add("END"));
+
+        executionSettings.Freeze(); // idempotent
+        Assert.True(executionSettings.IsFrozen);
+    }
 }
```

dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs

Lines changed: 1 addition & 0 deletions
```diff
@@ -2,6 +2,7 @@
 
 using System;
 using System.Collections.Generic;
+using System.Linq;
 using System.Net.Http;
 using System.Runtime.CompilerServices;
 using System.Text;
```

dotnet/src/Connectors/Connectors.Ollama/Settings/OllamaPromptExecutionSettings.cs

Lines changed: 35 additions & 2 deletions
```diff
@@ -51,7 +51,7 @@ public static OllamaPromptExecutionSettings FromExecutionSettings(PromptExecutio
     /// </summary>
     [JsonPropertyName("stop")]
     [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
-    public List<string>? Stop
+    public IList<string>? Stop
     {
         get => this._stop;
 
@@ -131,9 +131,42 @@ public int? NumPredict
         }
     }
 
+    /// <inheritdoc/>
+    public override void Freeze()
+    {
+        if (this.IsFrozen)
+        {
+            return;
+        }
+
+        base.Freeze();
+
+        if (this._stop is not null)
+        {
+            this._stop = new System.Collections.ObjectModel.ReadOnlyCollection<string>(this._stop);
+        }
+    }
+
+    /// <inheritdoc/>
+    public override PromptExecutionSettings Clone()
+    {
+        return new OllamaPromptExecutionSettings()
+        {
+            ModelId = this.ModelId,
+            ServiceId = this.ServiceId,
+            ExtensionData = this.ExtensionData is not null ? new Dictionary<string, object>(this.ExtensionData) : null,
+            Temperature = this.Temperature,
+            TopP = this.TopP,
+            TopK = this.TopK,
+            NumPredict = this.NumPredict,
+            Stop = this.Stop is not null ? new List<string>(this.Stop) : null,
+            FunctionChoiceBehavior = this.FunctionChoiceBehavior,
+        };
+    }
+
     #region private
 
-    private List<string>? _stop;
+    private IList<string>? _stop;
     private float? _temperature;
     private float? _topP;
     private int? _topK;
```

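Taken together, the two overrides give `OllamaPromptExecutionSettings` the same lifecycle as its OpenAI counterpart: `Freeze()` makes the instance read-only (wrapping the stop list in a `ReadOnlyCollection<string>`), while `Clone()` produces an independent copy with its own `Stop` list and `ExtensionData` dictionary. A minimal sketch of the caller-side behavior, assuming the connector namespace used above; variable names are illustrative:

```c#
using System.Collections.Generic;
using Microsoft.SemanticKernel.Connectors.Ollama;

// Sketch only: illustrates the Freeze()/Clone() semantics added in this commit.
var settings = new OllamaPromptExecutionSettings
{
    Temperature = 0.5f,
    Stop = new List<string> { "STOP" }
};

settings.Freeze();
// settings.Temperature = 1.0f;   // would throw InvalidOperationException (frozen)
// settings.Stop.Add("END");      // would throw NotSupportedException (read-only list)

// Clone() returns a fresh copy on which Freeze() has not been called, and whose
// Stop property is a new List<string> rather than the frozen read-only wrapper.
var editable = (OllamaPromptExecutionSettings)settings.Clone();
editable.Temperature = 1.0f;
editable.Stop!.Add("END");
```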