Skip to content

Commit

Permalink
feat: optimize auto-download implementation.
Browse files Browse the repository at this point in the history
  • Loading branch information
AsakusaRinne committed May 22, 2024
1 parent eb06f62 commit b8a8f9e
Show file tree
Hide file tree
Showing 6 changed files with 64 additions and 40 deletions.
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
using LLama.Native;
using LLama.Experimental.Native;

namespace LLama.Native
Expand Down Expand Up @@ -36,7 +35,7 @@ public static NativeLibraryConfig WithAutoDownload(this NativeLibraryConfig conf
// Also, we need to set the default local directory if the user does not.
if (string.IsNullOrEmpty(settings.Tag))
{
settings = settings.WithTag(GetCommitHash(NativeLibraryConfig.CurrentVersion));
settings = settings.WithTag(GetNativeLibraryCommitHash());
}
var defaultLocalDir = NativeLibraryDownloadSettings.GetDefaultLocalDir(settings.Tag);
settings = settings.WithLocalDir(settings.LocalDir ?? defaultLocalDir);
Expand All @@ -52,17 +51,9 @@ public static NativeLibraryConfig WithAutoDownload(this NativeLibraryConfig conf
return config;
}

/// <summary>
/// Resolve a LLamaSharp version string to its llama.cpp commit hash.
/// Unknown versions fall back to the input string itself (it may already be a hash).
/// </summary>
/// <param name="version">The LLamaSharp version (or commit hash) to resolve.</param>
/// <returns>The mapped commit hash, or <paramref name="version"/> when no mapping exists.</returns>
private static string GetCommitHash(string version)
    => NativeLibraryConfig.VersionMap.TryGetValue(version, out var hash) ? hash : version;
// The llama.cpp commit hash the native library binaries for this release were built from.
// NOTE(review): this is hard-coded per release — it must be updated whenever the bundled
// native binaries change; confirm it matches the published binary tag before releasing.
private const string COMMIT_HASH = "a743d7";

// Returns the llama.cpp commit hash used as the default download tag.
private static string GetNativeLibraryCommitHash() => COMMIT_HASH;

/// <summary>
/// Set whether to download the best-matched native library file automatically if there's no backend or specified file to load.
Expand All @@ -77,10 +68,7 @@ private static string GetCommitHash(string version)
public static NativeLibraryConfigContainer WithAutoDownload(this NativeLibraryConfigContainer container,
bool enable = true, NativeLibraryDownloadSettings? settings = null)
{
foreach(var config in container.Configs)
{
config.WithAutoDownload(enable, settings);
}
container.ForEach((config) => config.WithAutoDownload(enable, settings));
return container;
}
}
Expand Down
25 changes: 24 additions & 1 deletion LLama.Experimental/LLama.Experimental.csproj
Original file line number Diff line number Diff line change
@@ -1,11 +1,34 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFrameworks>net6;net7;net8;netstandard2.0</TargetFrameworks>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<langversion>12</langversion>
<RootNamespace>LLama</RootNamespace>

<Version>0.12.0</Version>
<Authors>Rinne</Authors>
<Company>SciSharp STACK</Company>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
<Copyright>MIT, SciSharp STACK $([System.DateTime]::UtcNow.ToString(yyyy))</Copyright>
<RepositoryUrl>https://github.com/SciSharp/LLamaSharp</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<PackageIconUrl>https://avatars3.githubusercontent.com/u/44989469?s=200&amp;v=4</PackageIconUrl>
<PackageTags>LLama, LLM, GPT, ChatGPT, NLP, AI, Chat Bot, SciSharp</PackageTags>
<Description>
LLamaSharp.Experimental is a package with some experimental features and aggressive updates.
This package includes some important features in advance, but is less stable.
</Description>
<PackageReleaseNotes>
Support native library auto-download.
</PackageReleaseNotes>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<PackageOutputPath>$(SolutionDir)/packages</PackageOutputPath>
<Platforms>AnyCPU;x64;Arm64</Platforms>
<PackageId>LLamaSharp.Experimental</PackageId>
<Configurations>Debug;Release;GPU</Configurations>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
</PropertyGroup>

<ItemGroup>
Expand Down
5 changes: 1 addition & 4 deletions LLama.Experimental/Native/SelectingPolicyWithAutoDownload.cs
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
using LLama.Abstractions;
using LLama.Abstractions;
using LLama.Native;
using System;
using System.Collections.Generic;
using System.Text;

namespace LLama.Experimental.Native
{
Expand Down
2 changes: 1 addition & 1 deletion LLama/LLamaSharp.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
Updated llama.cpp version to include better support for LLama3 tokenization.
</PackageReleaseNotes>
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<PackageOutputPath>packages</PackageOutputPath>
<PackageOutputPath>$(SolutionDir)/packages</PackageOutputPath>
<Platforms>AnyCPU;x64;Arm64</Platforms>
<PackageId>LLamaSharp</PackageId>
<Configurations>Debug;Release;GPU</Configurations>
Expand Down
29 changes: 13 additions & 16 deletions LLama/Native/Load/NativeLibraryConfig.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Linq;
using LLama.Abstractions;
Expand Down Expand Up @@ -282,21 +282,6 @@ public sealed partial class NativeLibraryConfig
/// </summary>
public static NativeLibraryConfig LLava { get; }


/// <summary>
/// The current version.
/// </summary>
public static string CurrentVersion => VERSION; // This should be changed before publishing new version. TODO: any better approach?

private const string COMMIT_HASH = "f7001c";
private const string VERSION = "master";

/// <summary>
/// Get the llama.cpp commit hash of the current version.
/// </summary>
/// <returns></returns>
public static string GetNativeLibraryCommitHash() => COMMIT_HASH;

static NativeLibraryConfig()
{
LLama = new(NativeLibraryName.LLama);
Expand Down Expand Up @@ -387,6 +372,18 @@ internal NativeLibraryConfigContainer(params NativeLibraryConfig[] configs)
_configs = configs;
}

/// <summary>
/// Invoke an action on every <see cref="NativeLibraryConfig"/> held by this container.
/// </summary>
/// <param name="action">The action to apply to each config.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="action"/> is null.</exception>
public void ForEach(Action<NativeLibraryConfig> action)
{
    // Validate at the public boundary so a null delegate fails fast with a clear
    // exception instead of a NullReferenceException part-way through the loop.
    // (Classic throw rather than ArgumentNullException.ThrowIfNull: the project
    // targets netstandard2.0, which lacks the throw helper.)
    if (action is null)
    {
        throw new ArgumentNullException(nameof(action));
    }

    foreach (var config in _configs)
    {
        action(config);
    }
}

#region configurators

#if NET6_0_OR_GREATER
Expand Down
21 changes: 20 additions & 1 deletion LLamaSharp.sln
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLamaSharp.KernelMemory", "LLama.KernelMemory\LLamaSharp.KernelMemory.csproj", "{E5589AE7-B86F-4343-A1CC-8E5D34596E52}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLama.Experimental", "LLama.Experimental\LLama.Experimental.csproj", "{BE4F977B-D4D9-472F-B506-EAE17542A810}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LLama.Benchmark", "LLama.Benchmark\LLama.Benchmark.csproj", "{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LLama.Benchmark", "LLama.Benchmark\LLama.Benchmark.csproj", "{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Expand Down Expand Up @@ -177,6 +178,24 @@ Global
{BE4F977B-D4D9-472F-B506-EAE17542A810}.Release|Arm64.Build.0 = Release|Any CPU
{BE4F977B-D4D9-472F-B506-EAE17542A810}.Release|x64.ActiveCfg = Release|Any CPU
{BE4F977B-D4D9-472F-B506-EAE17542A810}.Release|x64.Build.0 = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Debug|Arm64.ActiveCfg = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Debug|Arm64.Build.0 = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Debug|x64.ActiveCfg = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Debug|x64.Build.0 = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.GPU|Any CPU.ActiveCfg = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.GPU|Any CPU.Build.0 = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.GPU|Arm64.ActiveCfg = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.GPU|Arm64.Build.0 = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.GPU|x64.ActiveCfg = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.GPU|x64.Build.0 = Debug|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|Any CPU.Build.0 = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|Arm64.ActiveCfg = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|Arm64.Build.0 = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|x64.ActiveCfg = Release|Any CPU
{90D38FEE-68EA-459E-A4EE-268B9DFA1CD5}.Release|x64.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
Expand Down

0 comments on commit b8a8f9e

Please sign in to comment.