mirror of
https://github.com/nomic-ai/gpt4all
synced 2024-11-06 09:20:33 +00:00
make sample print usage and cleaner
This commit is contained in:
parent
448acb337d
commit
0e11584783
@ -1,21 +1,22 @@
|
||||
using Gpt4All;

// Gpt4All.Samples: load a local model and stream a completion to stdout.
//
// Usage: Gpt4All.Samples <model-path> [prompt]
//   model-path  path to the model file to load
//   prompt      optional prompt; defaults to "Name 3 colors."

// Guard before touching args[0]: without this, running the sample with no
// arguments crashed with IndexOutOfRangeException instead of explaining usage.
if (args.Length < 1)
{
    Console.WriteLine("Usage: Gpt4All.Samples <model-path> [prompt]");
    return;
}

var modelFactory = new Gpt4AllModelFactory();

var modelPath = args[0];
// Second argument is optional; fall back to a fixed demo prompt.
var input = args.Length > 1 ? args[1] : "Name 3 colors.";

// `using var` disposes the model (and its native resources) on scope exit.
using var model = modelFactory.LoadModel(modelPath);

var result = await model.GetStreamingPredictionAsync(
    input,
    PredictRequestOptions.Defaults);

// Write each token as it arrives so the user sees incremental output.
await foreach (var token in result.GetPredictionStreamingAsync())
{
    Console.Write(token);
}

Console.WriteLine();
Console.WriteLine("DONE.");
|
||||
using Gpt4All;

// Gpt4All.Samples: load a local model and stream a completion to stdout.
//
// Usage: Gpt4All.Samples <model-path> <prompt>
//   model-path  path to the model file to load
//   prompt      prompt text sent to the model

// Validate arguments first, before constructing the factory, so the
// usage-error path does no unnecessary work.
if (args.Length < 2)
{
    // Plain literal: the original used $"..." with no interpolation holes,
    // which .NET style analyzers flag as an unnecessary '$' prefix.
    Console.WriteLine("Usage: Gpt4All.Samples <model-path> <prompt>");
    return;
}

var modelFactory = new Gpt4AllModelFactory();

var modelPath = args[0];
var prompt = args[1];

// `using var` disposes the model (and its native resources) on scope exit.
using var model = modelFactory.LoadModel(modelPath);

var result = await model.GetStreamingPredictionAsync(
    prompt,
    PredictRequestOptions.Defaults);

// Write each token as it arrives so the user sees incremental output.
await foreach (var token in result.GetPredictionStreamingAsync())
{
    Console.Write(token);
}
|
||||
|
Loading…
Reference in New Issue
Block a user