
Commit 36152b8

Commit message: all the tests
Parent: e7d8fc2

File tree: 6 files changed (+379, -20 lines)

pkgs/sdk/server-ai/src/Config/LdAiConfig.cs

Lines changed: 17 additions & 3 deletions
@@ -47,17 +47,19 @@ public Message(string content, Role role)
     /// </summary>
     public class Builder
     {
-        private List<Message> _prompt;
+        private readonly List<Message> _prompt;
         private bool _enabled;
+        private Dictionary<string, object> _modelParams;


         /// <summary>
         /// TBD
         /// </summary>
         public Builder()
         {
-            _prompt = new List<Message>();
             _enabled = true;
+            _prompt = new List<Message>();
+            _modelParams = new Dictionary<string, object>();
         }

         /// <summary>

@@ -92,13 +94,25 @@ public Builder SetEnabled(bool enabled)
             return this;
         }

+        /// <summary>
+        ///
+        /// </summary>
+        /// <param name="key"></param>
+        /// <param name="value"></param>
+        /// <returns></returns>
+        public Builder SetModelParam(string key, object value)
+        {
+            _modelParams[key] = value;
+            return this;
+        }
+
         /// <summary>
         /// TBD
         /// </summary>
         /// <returns></returns>
         public LdAiConfig Build()
         {
-            return new LdAiConfig(_enabled, _prompt, new Meta(), new Dictionary<string, object>());
+            return new LdAiConfig(_enabled, _prompt, new Meta(), _modelParams);
         }
     }

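For orientation, a minimal usage sketch of the builder after this change, based on the SetModelParam and Build calls shown above; the parameter names and values ("temperature", "maxTokens") and the example prompt text are illustrative, not part of the commit:

using LaunchDarkly.Sdk.Server.Ai.Config;

public static class AiConfigExample
{
    // Hypothetical usage sketch: accumulates model parameters on the builder,
    // which Build() now forwards into the resulting LdAiConfig.
    public static LdAiConfig BuildExampleConfig()
    {
        return LdAiConfig.New()
            .AddPromptMessage("You are a helpful assistant.") // prompt content is illustrative
            .SetModelParam("temperature", 0.2)
            .SetModelParam("maxTokens", 4096)
            .Build();
    }
}

The accumulated parameters end up on the built config (exposed as config.Model in the new LdAiConfigTest below).
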
pkgs/sdk/server-ai/src/LdAiConfigTracker.cs

Lines changed: 59 additions & 11 deletions
@@ -26,10 +26,15 @@ public class LdAiConfigTracker : IDisposable

     private readonly string _key;

+    private readonly LdValue _trackData;
+
     private const string Duration = "$ld:ai:duration:total";
     private const string FeedbackPositive = "$ld:ai:feedback:user:positive";
     private const string FeedbackNegative = "$ld:ai:feedback:user:negative";
     private const string Generation = "$ld:ai:generation";
+    private const string TokenTotal = "$ld:ai:tokens:total";
+    private const string TokenInput = "$ld:ai:tokens:input";
+    private const string TokenOutput = "$ld:ai:tokens:output";

     /// <summary>
     ///

@@ -45,11 +50,7 @@ public LdAiConfigTracker(ILaunchDarklyClient client, LdAiConfig config, Context
         _key = key ?? throw new ArgumentNullException(nameof(key));
         _context = context;
         Config = config ?? throw new ArgumentNullException(nameof(config));
-    }
-
-    private LdValue GetTrackData()
-    {
-        return LdValue.ObjectFrom(new Dictionary<string, LdValue>
+        _trackData = LdValue.ObjectFrom(new Dictionary<string, LdValue>
         {
             { "versionKey", LdValue.Of(Config.VersionKey)},
             { "configKey" , LdValue.Of(_key) }

@@ -61,7 +62,7 @@ private LdValue GetTrackData()
     /// </summary>
     /// <param name="duration"></param>
     public void TrackDuration(float duration) =>
-        _client.Track(Duration, _context, GetTrackData(), duration);
+        _client.Track(Duration, _context, _trackData, duration);


     /// <summary>

@@ -71,12 +72,18 @@ public void TrackDuration(float duration) =>
     /// <typeparam name="T"></typeparam>
     /// <returns></returns>
     public async Task<T> TrackDurationOfTask<T>(Task<T> task)
+    {
+        var result = await MeasureDurationOfTaskMs(task);
+        TrackDuration(result.Item2);
+        return result.Item1;
+    }
+
+    private static async Task<Tuple<T, long>> MeasureDurationOfTaskMs<T>(Task<T> task)
     {
         var sw = Stopwatch.StartNew();
         var result = await task;
         sw.Stop();
-        TrackDuration(sw.ElapsedMilliseconds);
-        return result;
+        return Tuple.Create(result, sw.ElapsedMilliseconds);
     }

     /// <summary>

@@ -89,10 +96,10 @@ public void TrackFeedback(Feedback feedback)
         switch (feedback)
         {
             case Feedback.Positive:
-                _client.Track(FeedbackPositive, _context, GetTrackData(), 1);
+                _client.Track(FeedbackPositive, _context, _trackData, 1);
                 break;
             case Feedback.Negative:
-                _client.Track(FeedbackNegative, _context, GetTrackData(), 1);
+                _client.Track(FeedbackNegative, _context, _trackData, 1);
                 break;
             default:
                 throw new ArgumentOutOfRangeException(nameof(feedback), feedback, null);

@@ -104,7 +111,48 @@ public void TrackFeedback(Feedback feedback)
     /// </summary>
     public void TrackSuccess()
     {
-        _client.Track(Generation, _context, GetTrackData(), 1);
+        _client.Track(Generation, _context, _trackData, 1);
+    }
+
+
+    /// <summary>
+    ///
+    /// </summary>
+    /// <param name="request"></param>
+    /// <returns></returns>
+    public async Task<ProviderResponse> TrackRequest(Task<ProviderResponse> request)
+    {
+        var (result, durationMs) = await MeasureDurationOfTaskMs(request);
+        TrackSuccess();
+
+        TrackDuration(result.Statistics?.LatencyMs ?? durationMs);
+
+        if (result.Usage != null)
+        {
+            TrackTokens(result.Usage.Value);
+        }
+
+        return result;
+    }
+
+    /// <summary>
+    ///
+    /// </summary>
+    /// <param name="usage"></param>
+    public void TrackTokens(Usage usage)
+    {
+        if (usage.Total is > 0)
+        {
+            _client.Track(TokenTotal, _context, _trackData, usage.Total.Value);
+        }
+        if (usage.Input is > 0)
+        {
+            _client.Track(TokenInput, _context, _trackData, usage.Input.Value);
+        }
+        if (usage.Output is > 0)
+        {
+            _client.Track(TokenOutput, _context, _trackData, usage.Output.Value);
+        }
     }
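To make the new request-tracking surface concrete, here is a minimal sketch of how TrackRequest could be wired to a provider call; the provider method, its token counts and latency, and the tracker's namespace import are assumptions, not part of this commit:

using System.Threading.Tasks;
using LaunchDarkly.Sdk.Server.Ai;          // namespace of LdAiConfigTracker assumed
using LaunchDarkly.Sdk.Server.Ai.Metrics;

public static class TrackingExample
{
    // Wraps a hypothetical model call so the tracker records success, duration
    // (provider-reported latency if present, otherwise its own stopwatch), and token usage.
    public static Task<ProviderResponse> GenerateAndTrack(LdAiConfigTracker tracker) =>
        tracker.TrackRequest(CallModelProviderAsync());

    // Stand-in for a real provider call; the numbers below are made up.
    private static async Task<ProviderResponse> CallModelProviderAsync()
    {
        await Task.Delay(10);
        return new ProviderResponse(
            new Usage(Total: 100, Input: 60, Output: 40),
            new Statistics(LatencyMs: 250));
    }
}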

New file (namespace LaunchDarkly.Sdk.Server.Ai.Metrics)

Lines changed: 25 additions & 0 deletions

@@ -0,0 +1,25 @@
+namespace LaunchDarkly.Sdk.Server.Ai.Metrics;
+
+
+/// <summary>
+///
+/// </summary>
+/// <param name="LatencyMs"></param>
+public record struct Statistics(int? LatencyMs);
+
+
+/// <summary>
+///
+/// </summary>
+/// <param name="Total"></param>
+/// <param name="Input"></param>
+/// <param name="Output"></param>
+public record struct Usage(int? Total, int? Input, int? Output);
+
+
+/// <summary>
+///
+/// </summary>
+/// <param name="Usage"></param>
+/// <param name="Statistics"></param>
+public record struct ProviderResponse(Usage? Usage, Statistics? Statistics);
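A note on the nullable members, with a hypothetical illustration: TrackTokens (earlier in this commit) only emits a token event when the corresponding field is present and positive, and TrackRequest falls back to its own measured duration when Statistics?.LatencyMs is null, so a provider integration can report only what it knows. The values below are made up:

using LaunchDarkly.Sdk.Server.Ai.Metrics;

// Only the total token count is known; Input/Output stay null and are not tracked.
var partialUsage = new Usage(Total: 120, Input: null, Output: null);

// No provider-reported latency; TrackRequest will use its own stopwatch measurement.
var response = new ProviderResponse(partialUsage, null);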

pkgs/sdk/server-ai/test/LdAiClientTest.cs

Lines changed: 1 addition & 3 deletions
@@ -44,7 +44,7 @@ public void ReturnsDefaultConfigWhenFlagNotFound()

         var tracker = client.GetModelConfig("foo", Context.New(ContextKind.Default, "key"), defaultConfig);

-        Assert.Equal( defaultConfig, tracker.Config);
+        Assert.Equal(defaultConfig, tracker.Config);
     }

     private const string MetaDisabledExplicitly = """

@@ -97,8 +97,6 @@ public void ConfigNotEnabledReturnsDisabledInstance(string json)
         var tracker = client.GetModelConfig("foo", Context.New(ContextKind.Default, "key"),
             LdAiConfig.New().AddPromptMessage("foo").Build());

-
-
         Assert.False(tracker.Config.IsEnabled());
     }
New test file (class LdAiConfigTest, namespace LaunchDarkly.Sdk.Server.Ai)

Lines changed: 64 additions & 0 deletions

@@ -0,0 +1,64 @@
+using LaunchDarkly.Sdk.Server.Ai.Config;
+using LaunchDarkly.Sdk.Server.Ai.DataModel;
+using Xunit;
+
+namespace LaunchDarkly.Sdk.Server.Ai;
+
+public class LdAiConfigTest
+{
+    [Fact]
+    public void CanDisableAndEnableConfig()
+    {
+        var config1 = LdAiConfig.New().Disable().Build();
+        Assert.False(config1.IsEnabled());
+
+        var config2 = LdAiConfig.New().SetEnabled(false).Build();
+        Assert.False(config2.IsEnabled());
+
+        var config3 = LdAiConfig.New().Disable().SetEnabled(true).Build();
+        Assert.True(config3.IsEnabled());
+
+        var config4 = LdAiConfig.New().SetEnabled(true).Disable().Build();
+        Assert.False(config4.IsEnabled());
+    }
+
+    [Fact]
+    public void CanAddPromptMessages()
+    {
+        var config = LdAiConfig.New()
+            .AddPromptMessage("Hello")
+            .AddPromptMessage("World", Role.System)
+            .AddPromptMessage("!", Role.Assistant)
+            .Build();
+
+        Assert.Collection(config.Prompt,
+            message =>
+            {
+                Assert.Equal("Hello", message.Content);
+                Assert.Equal(Role.User, message.Role);
+            },
+            message =>
+            {
+                Assert.Equal("World", message.Content);
+                Assert.Equal(Role.System, message.Role);
+            },
+            message =>
+            {
+                Assert.Equal("!", message.Content);
+                Assert.Equal(Role.Assistant, message.Role);
+            });
+    }
+
+
+    [Fact]
+    public void CanSetModelParams()
+    {
+        var config = LdAiConfig.New()
+            .SetModelParam("foo", "bar")
+            .SetModelParam("baz", 42)
+            .Build();
+
+        Assert.Equal("bar", config.Model["foo"]);
+        Assert.Equal(42, config.Model["baz"]);
+    }
+}
