
Commit

Merge pull request #444 from tryAGI/bot/auto-format_202408260517
style: Run dotnet format
github-actions[bot] authored Aug 26, 2024
2 parents 7eeca6d + 1bcb5fe commit accf928
Showing 10 changed files with 36 additions and 36 deletions.
4 changes: 2 additions & 2 deletions src/DocumentLoaders/Word/src/ExcelLoader.cs
@@ -14,11 +14,11 @@ public async Task<IReadOnlyCollection<Document>> LoadAsync(
dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));

using var stream = await dataSource.GetStreamAsync(cancellationToken).ConfigureAwait(false);

var markdowns = ExcelToMarkdown.Convert(stream, firstRowIsHeader);

var metadata = settings.CollectMetadataIfRequired(dataSource);

return markdowns
.Select(x => new Document(x.Value, metadata: metadata?.With("Worksheet", x.Key)))
.ToArray();
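For context, here is a rough usage sketch of this loader. `DataSource.FromPath` appears elsewhere in this diff; the parameterless constructor and the default-valued settings argument are assumptions.

```csharp
// Sketch only: load a workbook and print each worksheet document.
// Each document's text is a markdown table, and the worksheet name is
// attached under the "Worksheet" metadata key (see the code above).
var loader = new ExcelLoader();
var documents = await loader.LoadAsync(DataSource.FromPath("report.xlsx"));

foreach (var document in documents)
{
    Console.WriteLine(document.PageContent);
}
```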
16 changes: 8 additions & 8 deletions src/DocumentLoaders/Word/src/Helpers.ExcelToMarkdown.cs
@@ -34,10 +34,10 @@ public static IList<KeyValuePair<string, string>> Convert(
{
continue;
}

var isFirstRow = true;
var builder = new StringBuilder();

foreach (var row in (document.WorkbookPart.GetPartById(sheet.Id.Value) as WorksheetPart)?.Worksheet
.GetFirstChild<SheetData>()?
.Descendants<Row>() ?? [])
@@ -60,16 +60,16 @@ public static IList<KeyValuePair<string, string>> Convert(
.ToList()) + " |");
}
}

markdowns.Add(new KeyValuePair<string, string>(
sheet.Name?.Value ?? $"Sheet{markdowns.Count}",
sheet.Name?.Value ?? $"Sheet{markdowns.Count}",
builder.ToString()));
}


return markdowns;
}

private static string GetCellValue(SharedStringTable? table, Cell cell)
{
var value = cell.CellValue?.InnerText ?? string.Empty;
@@ -80,7 +80,7 @@ private static string GetCellValue(SharedStringTable? table, Cell cell)
{
return table.ChildElements[index].InnerText;
}

return value;
}
}
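To make the helper's behavior concrete, here is a sketch of calling `Convert` directly (illustrative only; the loader shown earlier in this diff is the intended entry point):

```csharp
// Illustrative: Convert returns one (sheet name, markdown) pair per
// worksheet; with firstRowIsHeader = true the first row becomes the
// markdown table header, and unnamed sheets fall back to "Sheet{N}".
using var stream = File.OpenRead("report.xlsx");
var markdowns = ExcelToMarkdown.Convert(stream, firstRowIsHeader: true);

foreach (var pair in markdowns)
{
    Console.WriteLine($"## {pair.Key}");  // sheet name or fallback
    Console.WriteLine(pair.Value);        // "| A | B |"-style markdown rows
}
```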
12 changes: 6 additions & 6 deletions src/Helpers/GenerateDocs/Program.cs
@@ -74,19 +74,19 @@ static async Task ConvertTestToMarkdown(string path, string outputFolder)
{
return;
}

var usings = string.Join('\n', lines
.Where(x => x.StartsWith("using"))
.ToArray());

var start = lines.IndexOf(" {");
var end = lines.IndexOf(" }");
lines = lines
.GetRange(start + 1, end - start - 1)
.Where(x => !x.Contains(".Should()"))
.Select(x => x.StartsWith(" ") ? x[8..] : x)
.ToList();

const string commentPrefix = "//// ";
var markdown = string.Empty;
var completeCode = string.Join('\n', lines.Where(x => !x.StartsWith(commentPrefix)));
@@ -101,7 +101,7 @@ static async Task ConvertTestToMarkdown(string path, string outputFolder)
{
i++;
}

var comment = string.Join('\n', lines
.GetRange(startGroup, i - startGroup)
.Select(x => x[commentPrefix.Length..]));
@@ -120,14 +120,14 @@ static async Task ConvertTestToMarkdown(string path, string outputFolder)
isFirstCode = false;
markdown += Environment.NewLine + usings + Environment.NewLine;
}

markdown += $@"
{string.Join('\n', lines
.GetRange(startGroup, i - startGroup)).Trim()}
```" + '\n';
}
}

markdown = anyComment ? @"`Scroll till the end of the page if you just want code`
" + markdown : markdown;
markdown += anyComment ? @$"
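A short illustration of the convention this generator relies on (a sketch, not a file from the repository): inside a test body, lines prefixed with `//// ` become markdown prose, `.Should()` assertions are stripped, and everything else is emitted inside code fences.

```csharp
public async Task Example()
{
    //// # Greeting
    ////
    //// This line becomes markdown prose in the generated page.
    var greeting = "Hello";         // this line lands inside a code fence
    Console.WriteLine(greeting);
    greeting.Should().NotBeNull();  // stripped from the generated docs
}
```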
4 changes: 2 additions & 2 deletions src/Meta/test/WikiTests.AgentWithOllamaReact.cs
@@ -27,7 +27,7 @@ public async Task AgentWithOllamaReact()
//// ## Using ReAct with Google search
////
//// Now you should have everything necessary to connect your LLM to Google search

// var provider = new OllamaProvider(
// options: new RequestOptions
// {
@@ -55,7 +55,7 @@ public async Task AgentWithOllamaReact()
.UseTool(searchTool); // add the google search tool

await chain.RunAsync();

//// Let's run it and see the output:
//// As you can see, instead of giving an answer right away, the model starts to think it through
//// ```
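For readers of this diff, the collapsed parts of the test wire up roughly the following (a sketch: the model id and the `ReActAgentExecutor` link name are assumptions based on the visible fragments):

```csharp
// Assumed wiring, reconstructed from the visible fragments: an
// Ollama-hosted model driving a ReAct agent with a Google search tool.
var provider = new OllamaProvider();
var model = new OllamaChatModel(provider, id: "mistral:latest").UseConsoleForDebug();

var chain =
    Set("What is tryAGI/LangChain?")
    | ReActAgentExecutor(model)  // assumed link name
        .UseTool(searchTool);    // add the google search tool

await chain.RunAsync();
```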
2 changes: 1 addition & 1 deletion src/Meta/test/WikiTests.BuildingChatWithOpenAi.cs
@@ -60,7 +60,7 @@ public async Task BuildingChatWithOpenAi()
Console.Write("AI: ");
Console.WriteLine(res);
}

//// Now you can run the program and try to chat with it.
////
//// The final output will look like this:
10 changes: 5 additions & 5 deletions src/Meta/test/WikiTests.GettingStarted.cs
@@ -48,27 +48,27 @@ public async Task GettingStarted()
//// ***
////
//// So, finally, let's write some code!

// get model path
var modelPath = await HuggingFaceModelDownloader.GetModelAsync(
repository: "TheBloke/Thespis-13B-v0.5-GGUF",
fileName: "thespis-13b-v0.5.Q2_K.gguf",
version: "main");

//// This line will download the model and save it locally for future use. After the model is downloaded, it returns the path to the *.gguf file.
//// _**You can manually download any model you want and insert the path to it directly, without using HuggingFaceModelDownloader.**_
////
//// Now it's time to load our model into memory:

// load model
var model = LLamaSharpModelInstruction.FromPath(modelPath).UseConsoleForDebug();

//// Now let's build a chain!
////
//// # Building a chain
////
//// This is the minimal chain needed to make an LLM work:

// building a chain
var prompt = @"
You are an AI assistant that greets the world.
@@ -80,7 +80,7 @@ You are an AI assistant that greets the world.
| LLM(model, inputKey: "prompt");

await chain.RunAsync();

//// Here we can see two chains (or links) working together: Set and LLM.
////
//// * Set - sets the value of the _chain context variable **prompt**_
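To make the data flow concrete, the same chain can name its output explicitly and read it back (a sketch; the key names follow the conventions used in the chain-output wiki page further down in this diff):

```csharp
// Sketch: Set writes the prompt into the chain context under "prompt";
// LLM reads "prompt", calls the model, and writes its answer to "result".
var chain =
    Set(prompt, outputKey: "prompt")
    | LLM(model, inputKey: "prompt", outputKey: "result");

var answer = await chain.RunAsync("result", CancellationToken.None);
Console.WriteLine(answer);
```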
2 changes: 1 addition & 1 deletion src/Meta/test/WikiTests.HowToUseOpenAiProvider.cs
@@ -21,7 +21,7 @@ public async Task HowToUseOpenAiProvider()
var result = await chain.RunAsync("text", CancellationToken.None); // execute chain and get `text` context variable
Console.WriteLine(result);
// Hello! How can I assist you today?

//// `inputKey` and `outputKey` here are mostly for clarity about what goes where. They have default values and can be omitted. There are also classes like `Gpt35TurboModel` for simplicity.

//// ## Additional options
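Since the keys can be omitted, the same call can be written more tersely (a sketch relying on the defaults; `"text"` as the default key is inferred from the `RunAsync` call above):

```csharp
// Sketch: with the default input/output keys the chain reduces to this.
var model = new Gpt35TurboModel(provider);  // convenience class named above
var chain = Set("Hello!") | LLM(model);
Console.WriteLine(await chain.RunAsync("text", CancellationToken.None));
```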
4 changes: 2 additions & 2 deletions src/Meta/test/WikiTests.ImageGenerationWithOllamaAndStableDiffusion.cs
@@ -72,7 +72,7 @@ public async Task ImageGenerationWithOllamaAndStableDiffusion()
//// I took it from [here](https://github.com/vicuna-tools/Stablediffy/blob/main/Stablediffy.txt) with some minor modifications.
//// Basically, we are showing some examples so the model can understand the principles of prompt generation. You can play around with the examples and instructions to better match your preferences.
//// Now let's build a chain!

var template =
@"[INST]Transcript of a dialog, where the User interacts with an Assistant named Stablediffy. Stablediffy knows much about prompt engineering for stable diffusion (an open-source image generation software). The User asks Stablediffy about prompts for stable diffusion Image Generation.
@@ -102,7 +102,7 @@ public async Task ImageGenerationWithOllamaAndStableDiffusion()

// run the chain
await chain.RunAsync();

//// If everything is done correctly, you should have `image.png` in your bin directory.
}
}
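The collapsed middle of this test feeds the LLM's expanded prompt into Stable Diffusion and writes the result to disk; roughly like this (a sketch: `GenerateImage` and `SaveIntoFile` are assumed helper names, not confirmed by the visible diff):

```csharp
// Assumed shape of the full chain: the LLM turns a short request into a
// detailed Stable Diffusion prompt, which is rendered and saved to disk.
var chain =
    Set("a cat in a space suit", outputKey: "request")
    | LLM(model, inputKey: "request", outputKey: "image_prompt")
    | GenerateImage(sdModel, inputKey: "image_prompt", outputKey: "image")  // assumed helper
    | SaveIntoFile("image.png", inputKey: "image");                         // assumed helper

// run the chain
await chain.RunAsync();
```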
8 changes: 4 additions & 4 deletions src/Meta/test/WikiTests.RagWithOpenAiOllama.cs
@@ -54,14 +54,14 @@ public async Task RagWithOpenAiOllama()
////
//// ### OpenAI
//// To use this chat and embedding model, you will need an API key from OpenAI. This has non-zero cost.

// prepare OpenAI embedding model
var provider = new OpenAiProvider(apiKey:
Environment.GetEnvironmentVariable("OPENAI_API_KEY") ??
throw new InvalidOperationException("OPENAI_API_KEY key is not set"));
var embeddingModel = new TextEmbeddingV3SmallModel(provider);
var llm = new OpenAiLatestFastChatModel(provider);

//// ### Ollama
//// To use this chat and embedding model, you will need an Ollama instance running.
//// This is free, assuming it is running locally; this code assumes it is available at https://localhost:11434.
@@ -82,7 +82,7 @@ public async Task RagWithOpenAiOllama()
var vectorCollection = await vectorDatabase.AddDocumentsFromAsync<PdfPigPdfLoader>(
embeddingModel,
dimensions: 1536, // Should be 1536 for TextEmbeddingV3SmallModel
// First, specify the source to index.
// First, specify the source to index.
dataSource: DataSource.FromPath("E:\\AI\\Datasets\\Books\\Harry-Potter-Book-1.pdf"),
collectionName: "harrypotter",
// Second, configure how to extract chunks from the bigger document.
@@ -131,7 +131,7 @@ public async Task RagWithOpenAiOllama()
var result = await chain.RunAsync("text", CancellationToken.None);

Console.WriteLine(result);

//// We are done! Since we previously registered for events on the completion model, the output will be printed automatically.
////
//// # Example
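Once built, the collection can also be queried directly, outside the chain (a sketch; `GetSimilarDocuments` is named here by assumption as the repository's similarity-search helper):

```csharp
// Sketch: retrieve the chunks most similar to a question; the chain
// above does the same retrieval internally before prompting the LLM.
var similarDocuments = await vectorCollection.GetSimilarDocuments(
    embeddingModel, "Who was Harry Potter's first friend?", amount: 5);

foreach (var document in similarDocuments)
{
    Console.WriteLine(document.PageContent);
}
```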
10 changes: 5 additions & 5 deletions src/Meta/test/WikiTests.UsingChainOutput.cs
@@ -33,24 +33,24 @@ You are an AI assistant that greets the world.
//// Almost every link in a chain has at least one input and one output.
////
//// Look here:

var chain =
Set(prompt, outputKey: "prompt")
| LLM(model, inputKey: "prompt", outputKey: "result");

//// This means that, after the `Set` link executes, its result is stored in the "prompt" variable inside the chain context.
//// In turn, the `LLM` link reads the "prompt" variable from the chain context and uses it as input.
////
//// The `LLM` link also has an output key argument. Let's use it to save the LLM's result.

var result = await chain.RunAsync("result", CancellationToken.None);

//// Now the `LLM` link saves its result in the "result" variable inside the chain context. But how do we extract it from there?
////
//// The `chain.RunAsync()` method has an optional "resultKey" argument. It lets you specify which chain context variable to return as the result.

Console.WriteLine(result);

//// Output:
//// ```
//// Hello, World! How can I help you today?
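Because `resultKey` only selects which context variable to return, the same executed chain can just as well hand back its input (a sketch):

```csharp
// Sketch: asking for "prompt" instead of "result" returns the value
// that Set stored earlier rather than the model's answer.
var promptBack = await chain.RunAsync("prompt", CancellationToken.None);
Console.WriteLine(promptBack);
```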
