Skip to content

Commit ec3c179

Browse files
committed
fix(cli): fix custom output formats by using the untyped GetResponseAsync overload; add --debug and --model options
1 parent 9329bbb commit ec3c179

File tree

3 files changed

+39
-10
lines changed

3 files changed

+39
-10
lines changed

src/Cli/src/Commands/DoCommand.cs

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ internal sealed class DoCommand : Command
1919
var inputOption = CommonOptions.Input;
2020
var inputFileOption = CommonOptions.InputFile;
2121
var outputFileOption = CommonOptions.OutputFile;
22+
var debugOption = CommonOptions.Debug;
23+
var modelOption = CommonOptions.Model;
2224
var toolsOption = new Option<string[]>(
2325
aliases: ["--tools", "-t"],
2426
parseArgument: result => [.. result.Tokens.SelectMany(t => t.Value.Split(','))],
@@ -37,14 +39,16 @@ internal sealed class DoCommand : Command
3739
AddOption(toolsOption);
3840
AddOption(directoriesOption);
3941
AddOption(formatOption);
42+
AddOption(debugOption);
43+
AddOption(modelOption);
4044

41-
this.SetHandler(HandleAsync, inputOption, inputFileOption, outputFileOption, toolsOption, directoriesOption, formatOption);
45+
this.SetHandler(HandleAsync, inputOption, inputFileOption, outputFileOption, toolsOption, directoriesOption, formatOption, debugOption, modelOption);
4246
}
4347

44-
private static async Task HandleAsync(string input, string inputPath, string outputPath, string[] tools, string[] directories, string format)
48+
private static async Task HandleAsync(string input, string inputPath, string outputPath, string[] tools, string[] directories, string format, bool debug, string model)
4549
{
4650
var inputText = await Helpers.ReadInputAsync(input, inputPath).ConfigureAwait(false);
47-
var llm = await Helpers.GetChatModelAsync().ConfigureAwait(false);
51+
var llm = await Helpers.GetChatModelAsync(model, debug).ConfigureAwait(false);
4852

4953
var clients = await Task.WhenAll(tools.Select(async tool =>
5054
{
@@ -96,7 +100,7 @@ private static async Task HandleAsync(string input, string inputPath, string out
96100
Debug.WriteLine($" {aiTool.Description}");
97101
}
98102

99-
var response = await llm.GetResponseAsync<string>(
103+
var response = await llm.GetResponseAsync(
100104
inputText,
101105
new ChatOptions
102106
{

src/Cli/src/CommonOptions.cs

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,4 +18,14 @@ internal static class CommonOptions
1818
aliases: ["--output-file"],
1919
getDefaultValue: () => string.Empty,
2020
description: "Output file path");
21+
22+
public static Option<bool> Debug => new(
23+
aliases: ["--debug"],
24+
getDefaultValue: () => false,
25+
description: "Show Debug Information");
26+
27+
public static Option<string> Model => new(
28+
aliases: ["--model"],
29+
getDefaultValue: () => "o3-mini",
30+
description: "Model to use for commands. Default is o3-mini.");
2131
}

src/Cli/src/Helpers.cs

Lines changed: 21 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -66,13 +66,19 @@ public static async Task AuthenticateWithApiKeyAsync(string apiKey, string model
6666
await SetModelAsync(model).ConfigureAwait(false);
6767
}
6868

69-
public static async Task<IChatClient> GetChatModelAsync(CancellationToken cancellationToken = default)
69+
public static async Task<IChatClient> GetChatModelAsync(string? model = null, bool debug = false, CancellationToken cancellationToken = default)
7070
{
7171
var settingsFolder = GetSettingsFolder();
7272

7373
var provider = await File.ReadAllTextAsync(Path.Combine(settingsFolder, "provider.txt"), cancellationToken)
7474
.ConfigureAwait(false);
75-
var modelId = await File.ReadAllTextAsync(Path.Combine(settingsFolder, "model.txt"), cancellationToken).ConfigureAwait(false);
75+
var modelId = model ?? await File.ReadAllTextAsync(Path.Combine(settingsFolder, "model.txt"), cancellationToken).ConfigureAwait(false);
76+
if (debug)
77+
{
78+
Console.WriteLine("Using provider: " + provider);
79+
Console.WriteLine("Using model: " + modelId);
80+
}
81+
7682
IChatClient chatClient;
7783
Uri? endpoint = provider switch
7884
{
@@ -81,7 +87,7 @@ public static async Task<IChatClient> GetChatModelAsync(CancellationToken cancel
8187
};
8288
modelId = modelId switch
8389
{
84-
"latest-fast" => tryAGI.OpenAI.CreateChatCompletionRequestModelExtensions.ToValueString(tryAGI.OpenAI.ChatClient.LatestFastModel),
90+
"latest-fast" => "o3-mini",
8591
"latest-smart" => tryAGI.OpenAI.CreateChatCompletionRequestModelExtensions.ToValueString(tryAGI.OpenAI.ChatClient.LatestSmartModel),
8692
_ => modelId,
8793
};
@@ -104,7 +110,16 @@ public static async Task<IChatClient> GetChatModelAsync(CancellationToken cancel
104110
}
105111

106112
using var factory = LoggerFactory.Create(builder =>
107-
builder.AddDebug().SetMinimumLevel(LogLevel.Trace));
113+
{
114+
if (debug)
115+
{
116+
builder.AddConsole().SetMinimumLevel(LogLevel.Trace);
117+
}
118+
else
119+
{
120+
builder.AddDebug().SetMinimumLevel(LogLevel.Trace);
121+
}
122+
});
108123
var client = new ChatClientBuilder(chatClient)
109124
// 👇🏼 Add logging to the chat client, wrapping the function invocation client
110125
.UseLogging(factory)
@@ -115,9 +130,9 @@ public static async Task<IChatClient> GetChatModelAsync(CancellationToken cancel
115130
return client;
116131
}
117132

118-
public static async Task<string> GenerateUsingAuthenticatedModelAsync(string prompt, CancellationToken cancellationToken = default)
133+
public static async Task<string> GenerateUsingAuthenticatedModelAsync(string prompt, bool debug = false, CancellationToken cancellationToken = default)
119134
{
120-
IChatClient model = await GetChatModelAsync(cancellationToken).ConfigureAwait(false);
135+
IChatClient model = await GetChatModelAsync(null, debug, cancellationToken).ConfigureAwait(false);
121136

122137
var response = await model.GetResponseAsync(prompt, cancellationToken: cancellationToken).ConfigureAwait(false);
123138

0 commit comments

Comments
 (0)