-
Notifications
You must be signed in to change notification settings - Fork 47
/
Copy pathSettings.cs
293 lines (255 loc) · 11.6 KB
/
Settings.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
using AIShell.Abstraction;
using OllamaSharp;
namespace AIShell.Ollama.Agent;
internal partial class Settings
{
    // Lazy-initialization flags: the model list is fetched from the endpoint at most once,
    // and the running configuration is validated at most once.
    private bool _initialized = false;
    private bool _runningConfigChecked = false;

    // Caches whether the configured endpoint points at the local machine (null until first computed).
    private bool? _isRunningLocalHost = null;

    // Model names reported by the Ollama endpoint; populated by 'EnsureModelsInitialized'.
    private List<string> _availableModels = [];

    /// <summary>The predefined model configurations from the setting file.</summary>
    public List<ModelConfig> Presets { get; }

    /// <summary>The Ollama endpoint URL.</summary>
    public string Endpoint { get; }

    /// <summary>Indicates whether streaming responses are enabled.</summary>
    public bool Stream { get; }

    /// <summary>The model configuration currently in effect.</summary>
    public ModelConfig RunningConfig { get; private set; }

    /// <summary>
    /// Initializes the settings from deserialized configuration data.
    /// </summary>
    /// <param name="configData">The configuration data loaded from the setting file.</param>
    /// <exception cref="InvalidOperationException">
    /// Thrown when 'Endpoint' is missing, or when the configured default preset doesn't exist.
    /// </exception>
    public Settings(ConfigData configData)
    {
        if (string.IsNullOrWhiteSpace(configData.Endpoint))
        {
            throw new InvalidOperationException("'Endpoint' key is missing in configuration.");
        }

        Presets = configData.Presets ?? [];
        Endpoint = configData.Endpoint;
        Stream = configData.Stream;

        if (string.IsNullOrEmpty(configData.DefaultPreset))
        {
            RunningConfig = Presets.Count > 0
                ? Presets[0] with { } /* No default preset - use the first one defined in Presets */
                : new ModelConfig(name: nameof(RunningConfig), modelName: ""); /* No presets are defined - use empty */
        }
        else
        {
            // Ensure the default configuration is available in the list of configurations.
            var first = Presets.FirstOrDefault(c => c.Name == configData.DefaultPreset)
                ?? throw new InvalidOperationException($"The selected default preset '{configData.DefaultPreset}' doesn't exist.");
            // Use the default config
            RunningConfig = first with { };
        }
    }

    /// <summary>
    /// Retrieve available models from the Ollama endpoint, caching the list for subsequent calls.
    /// </summary>
    /// <param name="host">Used for writing error to host when it's a local endpoint but the Ollama server is not started. When the value is null, the endpoint check will be skipped.</param>
    /// <param name="cancellationToken">Used for cancel the operation.</param>
    /// <returns>True when the model list is (or already was) initialized; false when the endpoint self-check failed.</returns>
    private async Task<bool> EnsureModelsInitialized(IHost host, CancellationToken cancellationToken = default)
    {
        if (_initialized)
        {
            return true;
        }

        // The endpoint check is supposed to be interactive and can be skipped in some cases, such as when
        // the `PerformSelfcheck` method was already called right before entering this method.
        // So, we will simply skip the endpoint check when the passed-in host is null. If there's anything
        // wrong with the endpoint, the subsequent calls to retrieve models will fail and throw anyway.
        if (host is not null)
        {
            // ConfigureAwait(false) for consistency with every other await in this class.
            bool success = await PerformSelfcheck(host, checkEndpointOnly: true).ConfigureAwait(false);
            if (!success)
            {
                return false;
            }
        }

        using OllamaApiClient client = new(Endpoint);
        var models = await client.ListLocalModelsAsync(cancellationToken).ConfigureAwait(false);
        _availableModels = [.. models.Select(m => m.Name)];
        _initialized = true;
        return true;
    }

    /// <summary>
    /// Gets all model names available from the endpoint; returns an empty collection when the
    /// endpoint self-check fails.
    /// </summary>
    /// <param name="host">Optional host for interactive error output; null skips the endpoint check.</param>
    /// <param name="cancellationToken">Used for cancel the operation.</param>
    internal async Task<ICollection<string>> GetAllModels(IHost host = null, CancellationToken cancellationToken = default)
    {
        if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false))
        {
            return _availableModels;
        }

        return [];
    }

    /// <summary>
    /// Throws when the given model name is not in the cached list of available models.
    /// Assumes 'EnsureModelsInitialized' has already populated the list.
    /// </summary>
    /// <param name="name">The model name to validate; a ":latest" tag is appended when no tag is present.</param>
    /// <exception cref="InvalidOperationException">Thrown when the model doesn't exist.</exception>
    internal void EnsureModelNameIsValid(string name)
    {
        ArgumentException.ThrowIfNullOrEmpty(name);

        if (!_availableModels.Contains(name.AddLatestTagIfNecessery()))
        {
            throw new InvalidOperationException($"A model with the name '{name}' doesn't exist. The available models are: [{string.Join(", ", _availableModels)}].");
        }
    }

    // Render element for displaying the system prompt text.
    private static List<IRenderElement<string>> GetSystemPromptRenderElements() => [new CustomElement<string>(label: "System prompt", s => s)];

    /// <summary>Renders the current system prompt to the host.</summary>
    internal void ShowSystemPrompt(IHost host) => host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements());

    /// <summary>Sets a new system prompt on the running configuration and renders it.</summary>
    internal void SetSystemPrompt(IHost host, string prompt)
    {
        RunningConfig = RunningConfig with { SystemPrompt = prompt ?? string.Empty };
        host.RenderList(RunningConfig.SystemPrompt, GetSystemPromptRenderElements());
    }

    // Render elements for model listings: the model name plus an "Active" marker.
    private static List<IRenderElement<string>> GetRenderModelElements(Func<string, bool> isActive) => [
        new CustomElement<string>(label: "Name", m => m),
        new CustomElement<string>(label: "Active", m => isActive(m) ? "true" : string.Empty)
    ];

    /// <summary>
    /// Switches the running configuration to the specified model, after validating it exists.
    /// </summary>
    /// <param name="host">Host for interactive error output during initialization.</param>
    /// <param name="name">The model name to switch to.</param>
    /// <param name="cancellationToken">Used for cancel the operation.</param>
    internal async Task UseModel(IHost host, string name, CancellationToken cancellationToken = default)
    {
        if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false))
        {
            EnsureModelNameIsValid(name);
            RunningConfig = RunningConfig with { ModelName = name };
            // The model was just validated, so the running config needs no further check.
            _runningConfigChecked = true;
        }
    }

    /// <summary>Renders a table of all available models, marking the active one.</summary>
    internal async Task ListAllModels(IHost host, CancellationToken cancellationToken = default)
    {
        if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false))
        {
            host.RenderTable(_availableModels, GetRenderModelElements(m => m == RunningConfig.ModelName.AddLatestTagIfNecessery()));
        }
    }

    /// <summary>Renders details of a single model, after validating it exists.</summary>
    internal async Task ShowOneModel(IHost host, string name, CancellationToken cancellationToken = default)
    {
        if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false))
        {
            EnsureModelNameIsValid(name);
            host.RenderList(name, GetRenderModelElements(m => m == RunningConfig.ModelName.AddLatestTagIfNecessery()));
        }
    }

    /// <summary>
    /// Switches the running configuration to the given preset, after validating its model exists.
    /// </summary>
    internal async Task UsePreset(IHost host, ModelConfig preset, CancellationToken cancellationToken = default)
    {
        if (await EnsureModelsInitialized(host, cancellationToken).ConfigureAwait(false))
        {
            EnsureModelNameIsValid(preset.ModelName);
            RunningConfig = preset with { };
            // The preset's model was just validated, so the running config needs no further check.
            _runningConfigChecked = true;
        }
    }

    /// <summary>Renders a table of all presets, marking the active one.</summary>
    internal void ListAllPresets(IHost host)
    {
        host.RenderTable(
            Presets,
            [
                new PropertyElement<ModelConfig>(nameof(ModelConfig.Name)),
                new CustomElement<ModelConfig>(label: "Active", m => m == RunningConfig ? "true" : string.Empty)
            ]);
    }

    /// <summary>Renders details of a single preset, or an error when it doesn't exist.</summary>
    internal void ShowOnePreset(IHost host, string name)
    {
        var preset = Presets.FirstOrDefault(c => c.Name == name);
        if (preset is null)
        {
            host.WriteErrorLine($"The preset '{name}' doesn't exist.");
            return;
        }

        host.RenderList(
            preset,
            [
                new PropertyElement<ModelConfig>(nameof(ModelConfig.Name)),
                new PropertyElement<ModelConfig>(nameof(ModelConfig.Description)),
                new PropertyElement<ModelConfig>(nameof(ModelConfig.ModelName)),
                new PropertyElement<ModelConfig>(nameof(ModelConfig.SystemPrompt)),
                new CustomElement<ModelConfig>(label: "Active", m => m == RunningConfig ? "true" : string.Empty),
            ]);
    }

    /// <summary>
    /// Checks that the endpoint is reachable (for a local endpoint, that the 'ollama' process is
    /// running) and, unless <paramref name="checkEndpointOnly"/> is true, that the running
    /// configuration refers to an available model — picking the first available model when none
    /// is configured.
    /// </summary>
    /// <param name="host">Host for writing errors and status messages.</param>
    /// <param name="checkEndpointOnly">When true, skips validation of the running configuration.</param>
    /// <returns>True when all checks pass; false otherwise.</returns>
    internal async Task<bool> PerformSelfcheck(IHost host, bool checkEndpointOnly = false)
    {
        _isRunningLocalHost ??= IsLocalHost().IsMatch(new Uri(Endpoint).Host);

        if (_isRunningLocalHost is true && Process.GetProcessesByName("ollama").Length is 0)
        {
            host.WriteErrorLine("Please be sure Ollama is installed and the server is running. Check that all the prerequisites in the README of this agent are met.");
            return false;
        }

        if (!checkEndpointOnly && !_runningConfigChecked)
        {
            // Skip the endpoint check in 'EnsureModelsInitialized' as we already did it.
            await EnsureModelsInitialized(host: null).ConfigureAwait(false);

            if (string.IsNullOrEmpty(RunningConfig.ModelName))
            {
                // There is no model set, so use the first one available.
                if (_availableModels.Count is 0)
                {
                    host.WriteErrorLine($"No models are available to use from '{Endpoint}'.");
                    return false;
                }

                RunningConfig = RunningConfig with { ModelName = _availableModels.First() };
                host.MarkupLine($"No Ollama model is configured. Using the first available model [green]'{RunningConfig.ModelName}'[/].");
            }
            else
            {
                try
                {
                    EnsureModelNameIsValid(RunningConfig.ModelName);
                }
                catch (InvalidOperationException e)
                {
                    host.WriteErrorLine(e.Message);
                    return false;
                }
            }

            _runningConfigChecked = true;
        }

        return true;
    }

    /// <summary>
    /// Defines a generated regular expression to match localhost addresses
    /// "localhost", "127.0.0.1" and "[::1]" with case-insensitivity.
    /// </summary>
    [GeneratedRegex("^(localhost|127\\.0\\.0\\.1|\\[::1\\])$", RegexOptions.IgnoreCase)]
    internal partial Regex IsLocalHost();
}
/// <summary>
/// Represents a configuration for an Ollama model.
/// </summary>
internal record ModelConfig
{
    /// <summary>The display name of this configuration; required in the setting file.</summary>
    [JsonRequired]
    public string Name { get; init; }

    /// <summary>The Ollama model this configuration targets; required in the setting file.</summary>
    [JsonRequired]
    public string ModelName { get; init; }

    /// <summary>Optional system prompt used to guide the model's behavior; empty by default.</summary>
    public string SystemPrompt { get; init; } = string.Empty;

    /// <summary>Optional human-readable description of this configuration; empty by default.</summary>
    public string Description { get; init; } = string.Empty;

    /// <summary>
    /// Initializes a new instance of the <see cref="ModelConfig"/> class with the specified parameters.
    /// </summary>
    /// <param name="name">The name of the model configuration.</param>
    /// <param name="modelName">The name of the model to be used.</param>
    /// <param name="systemPrompt">An optional system prompt to guide the model's behavior. Defaults to an empty string.</param>
    /// <param name="description">An optional description of the model configuration. Defaults to an empty string.</param>
    public ModelConfig(string name, string modelName, string systemPrompt = "", string description = "")
    {
        (Name, ModelName, SystemPrompt, Description) = (name, modelName, systemPrompt, description);
    }
}
/// <summary>
/// Represents the configuration data for the AI Shell Ollama Agent.
/// </summary>
/// <param name="Presets">Optional. A list of predefined model configurations.</param>
/// <param name="Endpoint">Optional. The endpoint URL for the agent. Defaults to "http://localhost:11434".</param>
/// <param name="Stream">Optional. Indicates whether streaming is enabled. Defaults to <c>false</c>.</param>
/// <param name="DefaultPreset">Optional. Specifies the default preset name. If not provided, the first available preset will be used.</param>
internal record ConfigData(List<ModelConfig> Presets, string Endpoint = "http://localhost:11434", bool Stream = false, string DefaultPreset = "");
/// <summary>
/// Use source generation to serialize and deserialize the setting file.
/// Both metadata-based and serialization-optimization modes are used to gain the best performance.
/// The options allow trailing commas and comments so a hand-edited setting file still parses,
/// and property-name matching is case-insensitive.
/// </summary>
[JsonSourceGenerationOptions(
    WriteIndented = true,
    AllowTrailingCommas = true,
    PropertyNameCaseInsensitive = true,
    ReadCommentHandling = JsonCommentHandling.Skip,
    UseStringEnumConverter = true)]
[JsonSerializable(typeof(ConfigData))]
internal partial class SourceGenerationContext : JsonSerializerContext { }
/// <summary>
/// Extension helpers for normalizing Ollama model names.
/// </summary>
static class TagExtensions
{
    /// <summary>
    /// Appends the ":latest" tag to a model name that carries no explicit tag.
    /// NOTE(review): "Necessery" is a typo for "Necessary"; renaming would break the
    /// existing call sites, so the name is kept as-is.
    /// </summary>
    /// <param name="model">The model name, with or without a ":tag" suffix.</param>
    /// <returns>The model name, guaranteed to contain a tag.</returns>
    public static string AddLatestTagIfNecessery(this string model)
    {
        // A tag is already present when the name contains ':' (e.g. "llama3:8b").
        if (model.Contains(':'))
        {
            return model;
        }

        return $"{model}:latest";
    }
}