From 0e11584783ee255b2ff566e71f1d5e422f0c32ff Mon Sep 17 00:00:00 2001
From: redthing1
Date: Mon, 22 May 2023 14:45:04 -0700
Subject: [PATCH] make sample print usage and cleaner

---
 .../csharp/Gpt4All.Samples/Program.cs | 43 ++++++++++---------
 1 file changed, 22 insertions(+), 21 deletions(-)

diff --git a/gpt4all-bindings/csharp/Gpt4All.Samples/Program.cs b/gpt4all-bindings/csharp/Gpt4All.Samples/Program.cs
index f888d4ab..ac4ae80e 100644
--- a/gpt4all-bindings/csharp/Gpt4All.Samples/Program.cs
+++ b/gpt4all-bindings/csharp/Gpt4All.Samples/Program.cs
@@ -1,21 +1,22 @@
-using Gpt4All;
-
-var modelFactory = new Gpt4AllModelFactory();
-
-var modelPath = args[0];
-
-using var model = modelFactory.LoadModel(modelPath);
-
-var input = args.Length > 1 ? args[1] : "Name 3 colors.";
-
-var result = await model.GetStreamingPredictionAsync(
-    input,
-    PredictRequestOptions.Defaults);
-
-await foreach (var token in result.GetPredictionStreamingAsync())
-{
-    Console.Write(token);
-}
-
-Console.WriteLine();
-Console.WriteLine("DONE.");
+using Gpt4All;
+
+var modelFactory = new Gpt4AllModelFactory();
+if (args.Length < 2)
+{
+    Console.WriteLine($"Usage: Gpt4All.Samples <model-path> <prompt>");
+    return;
+}
+
+var modelPath = args[0];
+var prompt = args[1];
+
+using var model = modelFactory.LoadModel(modelPath);
+
+var result = await model.GetStreamingPredictionAsync(
+    prompt,
+    PredictRequestOptions.Defaults);
+
+await foreach (var token in result.GetPredictionStreamingAsync())
+{
+    Console.Write(token);
+}
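
Note for reviewers: the sketch below is illustrative only and is not part of the
patch. It shows one way the patched sample could additionally handle Ctrl+C,
assuming only the binding surface already used above (Gpt4AllModelFactory,
LoadModel, GetStreamingPredictionAsync, PredictRequestOptions.Defaults) plus
standard BCL cancellation. Whether cancellation actually interrupts generation
depends on the binding's iterator honoring the token forwarded by
WithCancellation.

    using Gpt4All;

    // Illustrative variant of the patched Program.cs, not part of this change.
    // Adds Ctrl+C handling around the token stream; everything else mirrors
    // the sample as committed above.
    if (args.Length < 2)
    {
        Console.WriteLine("Usage: Gpt4All.Samples <model-path> <prompt>");
        return;
    }

    using var cts = new CancellationTokenSource();
    Console.CancelKeyPress += (_, e) =>
    {
        e.Cancel = true; // keep the process alive so `using` disposals run
        cts.Cancel();
    };

    var modelFactory = new Gpt4AllModelFactory();
    using var model = modelFactory.LoadModel(args[0]);

    var result = await model.GetStreamingPredictionAsync(
        args[1],
        PredictRequestOptions.Defaults);

    try
    {
        // WithCancellation forwards the token to the async iterator;
        // cancellation takes effect only if the binding observes it.
        await foreach (var token in result.GetPredictionStreamingAsync()
                                          .WithCancellation(cts.Token))
        {
            Console.Write(token);
        }
    }
    catch (OperationCanceledException)
    {
        Console.Error.WriteLine("Cancelled.");
    }

    Console.WriteLine();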