Skip to content

Commit

Permalink
🔧 Fix file and directory existence check (#30)
Browse files Browse the repository at this point in the history
* 🔧 Fix file and directory existence check

- In PluginsViewModel.cs, modified the condition for checking if a file or directory exists in the Receive() method.
- In VariablesViewModel.cs, modified the logic for updating the VariablesCache dictionary.

* update version
  • Loading branch information
xbotter authored Feb 6, 2024
1 parent f683f6a commit 17c9f4a
Show file tree
Hide file tree
Showing 12 changed files with 710 additions and 707 deletions.
2 changes: 1 addition & 1 deletion Directory.Build.props
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<Project>
<PropertyGroup>
<Nullable>enable</Nullable>
<Version>0.5.0</Version>
<Version>0.5.2</Version>
</PropertyGroup>

<PropertyGroup Condition="'$(Configuration)'=='Release'">
Expand Down
78 changes: 39 additions & 39 deletions PromptPlayground/Services/PromptService.cs
Original file line number Diff line number Diff line change
Expand Up @@ -10,56 +10,56 @@

namespace PromptPlayground.Services
{
public class PromptService
{
private readonly Kernel _kernel;
private readonly ILLMConfigViewModel model;
public class PromptService
{
private readonly Kernel _kernel;
private readonly ILLMConfigViewModel model;

public PromptService(IConfigAttributesProvider provider)
{
this.model = provider.GetLLM() ?? throw new Exception("无法创建Kernel,请检查LLM配置");
public PromptService(IConfigAttributesProvider provider)
{
this.model = provider.GetLLM() ?? throw new Exception("无法创建Kernel,请检查LLM配置");

var builder = model.CreateKernelBuilder();
var builder = model.CreateKernelBuilder();


_kernel = builder.Build();
_kernel = builder.Build();

_kernel.ImportPluginFromType<TimePlugin>();
_kernel.ImportPluginFromType<TimePlugin>();

}
}

public async Task<GenerateResult> RunAsync(string prompt, PromptExecutionSettings? config, KernelArguments arguments, CancellationToken cancellationToken = default)
{
var sw = Stopwatch.StartNew();
var func = _kernel.CreateFunctionFromPrompt(prompt, config);
public async Task<GenerateResult> RunAsync(string prompt, PromptExecutionSettings? config, KernelArguments arguments, CancellationToken cancellationToken = default)
{
var sw = Stopwatch.StartNew();
var func = _kernel.CreateFunctionFromPrompt(prompt, config);

var result = await func.InvokeAsync(_kernel, arguments);
sw.Stop();
try
{
var usage = model.GetUsage(result);
return new GenerateResult()
{
Text = result.GetValue<string>() ?? "",
TokenUsage = usage,
Elapsed = sw.Elapsed,
var result = await func.InvokeAsync(_kernel, arguments);
sw.Stop();
try
{
var usage = model.GetUsage(result);
return new GenerateResult()
{
Text = result.GetValue<string>() ?? "",
TokenUsage = usage,
Elapsed = sw.Elapsed,

};
};

}
catch (KernelException ex)
{
return new GenerateResult()
{
Text = ex.Message,
Elapsed = sw.Elapsed,
Error = ex.Message,
};
}
}
}
catch (KernelException ex)
{
return new GenerateResult()
{
Text = ex.Message,
Elapsed = sw.Elapsed,
Error = ex.Message,
};
}
}

public KernelArguments CreateArguments() => new KernelArguments();
}
// Creates an empty argument collection for a prompt invocation.
public KernelArguments CreateArguments() => new();
}


}
278 changes: 139 additions & 139 deletions PromptPlayground/ViewModels/ConfigViewModel.cs
Original file line number Diff line number Diff line change
Expand Up @@ -16,147 +16,147 @@
namespace PromptPlayground.ViewModels;

public partial class ConfigViewModel : ViewModelBase, IConfigAttributesProvider,
IRecipient<ResultCountRequestMessage>,
IRecipient<RequestMessage<IConfigAttributesProvider>>
IRecipient<ResultCountRequestMessage>,
IRecipient<RequestMessage<IConfigAttributesProvider>>
{
private string[] RequiredAttributes =
private string[] RequiredAttributes =
[
#region LLM Config
ConfigAttribute.AzureDeployment,
ConfigAttribute.AzureEndpoint,
ConfigAttribute.AzureSecret,
ConfigAttribute.BaiduClientId,
ConfigAttribute.BaiduSecret,
ConfigAttribute.BaiduModel,
ConfigAttribute.OpenAIApiKey,
ConfigAttribute.OpenAIModel,
ConfigAttribute.DashScopeApiKey,
ConfigAttribute.DashScopeModel,
#endregion
#region LLM Config
ConfigAttribute.AzureDeployment,
ConfigAttribute.AzureEndpoint,
ConfigAttribute.AzureSecret,
ConfigAttribute.BaiduClientId,
ConfigAttribute.BaiduSecret,
ConfigAttribute.BaiduModel,
ConfigAttribute.OpenAIApiKey,
ConfigAttribute.OpenAIModel,
ConfigAttribute.DashScopeApiKey,
ConfigAttribute.DashScopeModel,
#endregion
];

public List<ConfigAttribute> AllAttributes { get; set; } = [];

[ObservableProperty]
private int maxCount = 3;

#region Model
private int modelSelectedIndex = 0;

public int ModelSelectedIndex
{
get => modelSelectedIndex; set
{
if (modelSelectedIndex != value)
{
modelSelectedIndex = value;
OnPropertyChanged(nameof(ModelSelectedIndex));
OnPropertyChanged(nameof(SelectedModel));
OnPropertyChanged(nameof(ModelAttributes));
OnPropertyChanged(nameof(SelectedModel.Name));
}
}
}

[JsonIgnore]
public List<string> ModelLists => LLMs.Select(_ => _.Name).ToList();
[JsonIgnore]
public IList<ConfigAttribute> ModelAttributes => SelectedModel.SelectAttributes(this.AllAttributes);
[JsonIgnore]
public ILLMConfigViewModel SelectedModel => LLMs[ModelSelectedIndex];
[JsonIgnore]
private readonly List<ILLMConfigViewModel> LLMs = [];
#endregion

#region IConfigAttributesProvider
IList<ConfigAttribute> IConfigAttributesProvider.AllAttributes => this.AllAttributes;
public ILLMConfigViewModel GetLLM()
{
return this.SelectedModel;
}
#endregion

public ConfigViewModel(bool requireLoadConfig = false) : this()
{
if (requireLoadConfig)
{
WeakReferenceMessenger.Default.RegisterAll(this);
LoadConfigFromUserProfile();
}
}

public ConfigViewModel()
{
this.AllAttributes = CheckAttributes(this.AllAttributes);

LLMs.Add(new AzureOpenAIConfigViewModel(this));
LLMs.Add(new BaiduConfigViewModel(this));
LLMs.Add(new OpenAIConfigViewModel(this));
LLMs.Add(new DashScopeConfigViewModel(this));
}

private void LoadConfigFromUserProfile()
{
var profile = GetConfigFilePath();
if (File.Exists(profile))
{
var vm = JsonSerializer.Deserialize<ConfigViewModel>(File.ReadAllText(profile));
if (vm != null)
{
this.AllAttributes = CheckAttributes(vm.AllAttributes);
OnPropertyChanged(nameof(AllAttributes));

this.MaxCount = vm.MaxCount;
this.ModelSelectedIndex = vm.ModelSelectedIndex;
}
}
}
private List<ConfigAttribute> CheckAttributes(List<ConfigAttribute> list)
{
foreach (var item in RequiredAttributes)
{
if (!list.Any(_ => _.Name == item))
{
list.Add(new ConfigAttribute(item));
}
}
return list;
}

private void SaveConfigToUserProfile()
{
var profile = GetConfigFilePath();
File.WriteAllText(profile, JsonSerializer.Serialize(this));
}

private string GetConfigFilePath(string configFile = "user.config")
{
var profile = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
var folder = Path.Combine(profile, ".prompt_playground");
if (!Directory.Exists(folder))
{
Directory.CreateDirectory(folder);
}
return Path.Combine(folder, configFile);
}

public void SaveConfig()
{
SaveConfigToUserProfile();
}

public void ReloadConfig()
{
this.LoadConfigFromUserProfile();
}

public void Receive(ResultCountRequestMessage message)
{
message.Reply(this.MaxCount);
}

public void Receive(RequestMessage<IConfigAttributesProvider> message)
{
message.Reply(this);
}
// Full set of configuration attributes; persisted when this view model is JSON-serialized.
public List<ConfigAttribute> AllAttributes { get; set; } = [];

// Number of results to generate per run (replied to ResultCountRequestMessage); default 3.
[ObservableProperty]
private int maxCount = 3;

#region Model
// Backing field: index of the currently selected model in the LLMs list.
private int modelSelectedIndex = 0;

// Selected model index with manual change notification: updating it also
// refreshes the bindings that depend on it (SelectedModel, ModelAttributes).
public int ModelSelectedIndex
{
get => modelSelectedIndex; set
{
if (modelSelectedIndex != value)
{
modelSelectedIndex = value;
OnPropertyChanged(nameof(ModelSelectedIndex));
OnPropertyChanged(nameof(SelectedModel));
OnPropertyChanged(nameof(ModelAttributes));
// NOTE(review): nameof(SelectedModel.Name) evaluates to the string "Name",
// which does not match a property on this view model — confirm intent.
OnPropertyChanged(nameof(SelectedModel.Name));
}
}
}

// Display names of the registered LLM back-ends (for the model picker); not persisted.
[JsonIgnore]
public List<string> ModelLists => LLMs.Select(_ => _.Name).ToList();
// Attributes applicable to the currently selected model, filtered from AllAttributes.
[JsonIgnore]
public IList<ConfigAttribute> ModelAttributes => SelectedModel.SelectAttributes(this.AllAttributes);
// The currently selected LLM configuration view model.
[JsonIgnore]
public ILLMConfigViewModel SelectedModel => LLMs[ModelSelectedIndex];
// Registered LLM back-ends; populated in the default constructor.
[JsonIgnore]
private readonly List<ILLMConfigViewModel> LLMs = [];
#endregion

#region IConfigAttributesProvider
// Explicit interface implementation exposing the full attribute list.
IList<ConfigAttribute> IConfigAttributesProvider.AllAttributes => this.AllAttributes;
// Returns the currently selected LLM configuration as the active provider.
public ILLMConfigViewModel GetLLM()
{
return this.SelectedModel;
}
#endregion

// Secondary constructor: when requested, registers this instance for all
// messenger requests and loads the persisted configuration from the user profile.
public ConfigViewModel(bool requireLoadConfig = false) : this()
{
if (requireLoadConfig)
{
WeakReferenceMessenger.Default.RegisterAll(this);
LoadConfigFromUserProfile();
}
}

// Default constructor: ensures every required attribute is present,
// then registers the supported LLM back-ends.
public ConfigViewModel()
{
this.AllAttributes = CheckAttributes(this.AllAttributes);

LLMs.Add(new AzureOpenAIConfigViewModel(this));
LLMs.Add(new BaiduConfigViewModel(this));
LLMs.Add(new OpenAIConfigViewModel(this));
LLMs.Add(new DashScopeConfigViewModel(this));
}

// Loads persisted settings from the user-profile config file, if it exists,
// and copies them into this instance (re-validating required attributes).
private void LoadConfigFromUserProfile()
{
var profile = GetConfigFilePath();
if (File.Exists(profile))
{
var vm = JsonSerializer.Deserialize<ConfigViewModel>(File.ReadAllText(profile));
if (vm != null)
{
// Re-check required attributes in case the saved file predates new settings.
this.AllAttributes = CheckAttributes(vm.AllAttributes);
OnPropertyChanged(nameof(AllAttributes));

this.MaxCount = vm.MaxCount;
this.ModelSelectedIndex = vm.ModelSelectedIndex;
}
}
}
// Ensures every required attribute name is present in the list, appending a
// fresh ConfigAttribute for each missing one. Returns the same list instance.
private List<ConfigAttribute> CheckAttributes(List<ConfigAttribute> list)
{
    var missing = RequiredAttributes.Where(name => list.All(a => a.Name != name));
    foreach (var name in missing)
    {
        list.Add(new ConfigAttribute(name));
    }
    return list;
}

// Serializes this view model to JSON and writes it to the user-profile config file.
private void SaveConfigToUserProfile()
{
    var path = GetConfigFilePath();
    var json = JsonSerializer.Serialize(this);
    File.WriteAllText(path, json);
}

// Returns the full path of the given config file inside the per-user
// ".prompt_playground" folder, creating the folder if necessary.
private string GetConfigFilePath(string configFile = "user.config")
{
    var profile = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
    var folder = Path.Combine(profile, ".prompt_playground");
    // Directory.CreateDirectory is documented as a no-op when the directory
    // already exists, so the separate Exists() pre-check was redundant and
    // left a TOCTOU window between the check and the create.
    Directory.CreateDirectory(folder);
    return Path.Combine(folder, configFile);
}

// Public entry point for persisting the current configuration.
public void SaveConfig() => SaveConfigToUserProfile();

// Public entry point for re-reading the persisted configuration from disk.
public void ReloadConfig() => LoadConfigFromUserProfile();

// Messenger handler: replies with the configured maximum result count.
public void Receive(ResultCountRequestMessage message)
{
message.Reply(this.MaxCount);
}

// Messenger handler: replies with this instance as the active attribute provider.
public void Receive(RequestMessage<IConfigAttributesProvider> message)
{
message.Reply(this);
}
}
Loading

0 comments on commit 17c9f4a

Please sign in to comment.