Browse Source

feat(ai): Add Ollama Integration

pull/1454/head
colin 2 days ago
parent
commit
32b9d24b01
  1. 3
      aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xml
  2. 30
      aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xsd
  3. 27
      aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN.Abp.AI.Ollama.csproj
  4. 17
      aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/AbpAIOllamaModule.cs
  5. 51
      aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaChatClientProvider.cs
  6. 39
      aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaKernelProvider.cs

3
aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xml

@@ -0,0 +1,3 @@
<!-- Fody ConfigureAwait weaver: rewrites every await in this assembly to use
     ConfigureAwait(false), so library code never captures the caller's
     synchronization context. Configured via the imported configureawait.props. -->
<Weavers xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="FodyWeavers.xsd">
  <ConfigureAwait ContinueOnCapturedContext="false" />
</Weavers>

30
aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xsd

@@ -0,0 +1,30 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Schema used by IDEs to validate FodyWeavers.xml. Fody regenerates this file
     on build (see the note below), so manual edits are transient. -->
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
  <!-- This file was generated by Fody. Manual changes to this file will be lost when your project is rebuilt. -->
  <xs:element name="Weavers">
    <xs:complexType>
      <xs:all>
        <xs:element name="ConfigureAwait" minOccurs="0" maxOccurs="1">
          <xs:complexType>
            <xs:attribute name="ContinueOnCapturedContext" type="xs:boolean" />
          </xs:complexType>
        </xs:element>
      </xs:all>
      <xs:attribute name="VerifyAssembly" type="xs:boolean">
        <xs:annotation>
          <xs:documentation>'true' to run assembly verification (PEVerify) on the target assembly after all weavers have been executed.</xs:documentation>
        </xs:annotation>
      </xs:attribute>
      <xs:attribute name="VerifyIgnoreCodes" type="xs:string">
        <xs:annotation>
          <xs:documentation>A comma-separated list of error codes that can be safely ignored in assembly verification.</xs:documentation>
        </xs:annotation>
      </xs:attribute>
      <xs:attribute name="GenerateXsd" type="xs:boolean">
        <xs:annotation>
          <xs:documentation>'false' to turn off automatic generation of the XML Schema file.</xs:documentation>
        </xs:annotation>
      </xs:attribute>
    </xs:complexType>
  </xs:element>
</xs:schema>

27
aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN.Abp.AI.Ollama.csproj

@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Shared build settings: Fody ConfigureAwait weaving and common package metadata. -->
  <Import Project="..\..\..\..\configureawait.props" />
  <Import Project="..\..\..\..\common.props" />
  <PropertyGroup>
    <TargetFrameworks>netstandard2.0;netstandard2.1;net8.0;net9.0;net10.0</TargetFrameworks>
    <AssemblyName>LINGYUN.Abp.AI.Ollama</AssemblyName>
    <PackageId>LINGYUN.Abp.AI.Ollama</PackageId>
    <!-- SKEXP0001/0010/0070 suppress Semantic Kernel "experimental API" diagnostics.
         NOTE(review): the list mixes ';' and ',' separators; both are accepted by the
         compiler's /nowarn, but a single separator style would be cleaner. -->
    <NoWarn>$(NoWarn);SKEXP0001,SKEXP0010,SKEXP0070</NoWarn>
    <GenerateAssemblyConfigurationAttribute>false</GenerateAssemblyConfigurationAttribute>
    <GenerateAssemblyCompanyAttribute>false</GenerateAssemblyCompanyAttribute>
    <GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
    <Nullable>enable</Nullable>
    <!-- Empty root namespace: namespaces follow the folder layout (LINGYUN\Abp\AI\Ollama). -->
    <RootNamespace />
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="OllamaSharp" />
    <PackageReference Include="Microsoft.SemanticKernel.Connectors.Ollama" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\LINGYUN.Abp.AI.Core\LINGYUN.Abp.AI.Core.csproj" />
  </ItemGroup>
</Project>

17
aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/AbpAIOllamaModule.cs

@@ -0,0 +1,17 @@
using Volo.Abp.Modularity;
namespace LINGYUN.Abp.AI.Ollama;
/// <summary>
/// ABP module that plugs Ollama into the AI core module by registering
/// <see cref="OllamaChatClientProvider"/> and <see cref="OllamaKernelProvider"/>
/// with <c>AbpAICoreOptions</c>.
/// </summary>
[DependsOn(typeof(AbpAICoreModule))]
public class AbpAIOllamaModule : AbpModule
{
    public override void ConfigureServices(ServiceConfigurationContext context)
    {
        Configure<AbpAICoreOptions>(aiOptions =>
        {
            // Expose Ollama both as a chat-client backend and as a kernel backend.
            aiOptions.ChatClientProviders.Add<OllamaChatClientProvider>();
            aiOptions.KernelProviders.Add<OllamaKernelProvider>();
        });
    }
}

51
aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaChatClientProvider.cs

@@ -0,0 +1,51 @@
using LINGYUN.Abp.AI.Models;
using LINGYUN.Abp.AI.Workspaces;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using OllamaSharp;
using System;
using System.Threading.Tasks;
using Volo.Abp;
namespace LINGYUN.Abp.AI.Ollama;
/// <summary>
/// <see cref="ChatClientProvider"/> implementation backed by an Ollama server,
/// using the OllamaSharp <see cref="OllamaApiClient"/> as the underlying
/// <see cref="IChatClient"/>.
/// </summary>
public class OllamaChatClientProvider : ChatClientProvider
{
    /// <summary>Endpoint used when the workspace does not specify an <c>ApiBaseUrl</c> (Ollama's conventional local address).</summary>
    public const string DefaultEndpoint = "http://localhost:11434";

    /// <summary>Well-known name under which this provider is registered.</summary>
    public const string ProviderName = "Ollama";

    public override string Name => ProviderName;

    public OllamaChatClientProvider(
        IServiceProvider serviceProvider) : base(serviceProvider)
    {
    }

    /// <summary>
    /// Creates an <see cref="IChatClient"/> for the given workspace. Registered
    /// <c>ChatClientBuildActions</c> are applied first, then the standard
    /// logging / telemetry / function-invocation / caching middleware.
    /// </summary>
    /// <param name="workspace">Workspace whose <c>ApiBaseUrl</c> selects the Ollama endpoint.</param>
    /// <returns>The fully built chat client.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="workspace"/> is null.</exception>
    public override async Task<IChatClient> CreateAsync(WorkspaceDefinition workspace)
    {
        Check.NotNull(workspace, nameof(workspace));

        var options = ServiceProvider.GetRequiredService<IOptions<AbpAICoreOptions>>().Value;

        // Fall back to the conventional local Ollama endpoint when none is configured.
        var ollamaApiClient = new OllamaApiClient(workspace.ApiBaseUrl ?? DefaultEndpoint);

        // Idiomatic extension-method call instead of invoking the static
        // extension class (ChatClientBuilderChatClientExtensions) directly.
        var chatClientBuilder = ollamaApiClient.AsBuilder();

        // Let user-registered build actions customize the pipeline before the
        // standard middleware is appended.
        foreach (var handlerAction in options.ChatClientBuildActions)
        {
            await handlerAction(workspace, ServiceProvider, chatClientBuilder);
        }

        return chatClientBuilder
            .UseLogging()
            .UseOpenTelemetry()
            .UseFunctionInvocation()
            .UseDistributedCache()
            .Build(ServiceProvider);
    }

    /// <summary>
    /// No static model catalog is advertised.
    /// NOTE(review): Ollama can report installed models at runtime — consider
    /// querying the server (e.g. OllamaSharp's list-models API) instead of
    /// returning an empty array; confirm against how callers use GetModels().
    /// </summary>
    public override ChatModel[] GetModels()
    {
        return [];
    }
}

39
aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaKernelProvider.cs

@@ -0,0 +1,39 @@
using LINGYUN.Abp.AI.Workspaces;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Microsoft.SemanticKernel;
using OllamaSharp;
using System;
using System.Threading.Tasks;
using Volo.Abp;
namespace LINGYUN.Abp.AI.Ollama;
/// <summary>
/// <see cref="KernelProvider"/> that builds Semantic Kernel instances backed by
/// an Ollama server, registered under the same provider name as
/// <see cref="OllamaChatClientProvider"/>.
/// </summary>
public class OllamaKernelProvider : KernelProvider
{
    /// <summary>Well-known name under which this provider is registered.</summary>
    public const string ProviderName = "Ollama";

    public override string Name => ProviderName;

    public OllamaKernelProvider(IServiceProvider serviceProvider) : base(serviceProvider)
    {
    }

    /// <summary>
    /// Builds a <see cref="Kernel"/> wired to the workspace's Ollama endpoint,
    /// applying any registered <c>KernelBuildActions</c> before building.
    /// </summary>
    /// <param name="workspace">Workspace whose <c>ApiBaseUrl</c> selects the Ollama endpoint.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="workspace"/> is null.</exception>
    public override Task<Kernel> CreateAsync(WorkspaceDefinition workspace)
    {
        Check.NotNull(workspace, nameof(workspace));

        var aiOptions = ServiceProvider.GetRequiredService<IOptions<AbpAICoreOptions>>().Value;

        // Reuse the chat-client provider's default so both providers agree on the endpoint.
        var endpoint = workspace.ApiBaseUrl ?? OllamaChatClientProvider.DefaultEndpoint;
        var apiClient = new OllamaApiClient(endpoint);

        var builder = Kernel.CreateBuilder().AddOllamaChatClient(apiClient);

        // Apply user-registered customizations before building the kernel.
        foreach (var buildAction in aiOptions.KernelBuildActions)
        {
            buildAction(workspace, builder);
        }

        return Task.FromResult(builder.Build());
    }
}
Loading…
Cancel
Save