diff --git a/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xml b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xml
new file mode 100644
index 000000000..1715698cc
--- /dev/null
+++ b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xml
@@ -0,0 +1,3 @@
+
+
+
\ No newline at end of file
diff --git a/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xsd b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xsd
new file mode 100644
index 000000000..3f3946e28
--- /dev/null
+++ b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/FodyWeavers.xsd
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 'true' to run assembly verification (PEVerify) on the target assembly after all weavers have been executed.
+
+
+
+
+ A comma-separated list of error codes that can be safely ignored in assembly verification.
+
+
+
+
+ 'false' to turn off automatic generation of the XML Schema file.
+
+
+
+
+
\ No newline at end of file
diff --git a/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN.Abp.AI.Ollama.csproj b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN.Abp.AI.Ollama.csproj
new file mode 100644
index 000000000..60d093d0a
--- /dev/null
+++ b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN.Abp.AI.Ollama.csproj
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+ netstandard2.0;netstandard2.1;net8.0;net9.0;net10.0
+ LINGYUN.Abp.AI.Ollama
+ LINGYUN.Abp.AI.Ollama
+ $(NoWarn);SKEXP0001,SKEXP0010,SKEXP0070
+ false
+ false
+ false
+ enable
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/AbpAIOllamaModule.cs b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/AbpAIOllamaModule.cs
new file mode 100644
index 000000000..7c061c42e
--- /dev/null
+++ b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/AbpAIOllamaModule.cs
@@ -0,0 +1,17 @@
+using Volo.Abp.Modularity;
+
+namespace LINGYUN.Abp.AI.Ollama;
+
+[DependsOn(typeof(AbpAICoreModule))]
+public class AbpAIOllamaModule : AbpModule
+{
+ public override void ConfigureServices(ServiceConfigurationContext context)
+ {
+ Configure<AbpAIOptions>(options =>
+ {
+ options.ChatClientProviders.Add<OllamaChatClientProvider>();
+
+ options.KernelProviders.Add<OllamaKernelProvider>();
+ });
+ }
+}
diff --git a/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaChatClientProvider.cs b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaChatClientProvider.cs
new file mode 100644
index 000000000..7ce79dde8
--- /dev/null
+++ b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaChatClientProvider.cs
@@ -0,0 +1,51 @@
+using LINGYUN.Abp.AI.Models;
+using LINGYUN.Abp.AI.Workspaces;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Options;
+using OllamaSharp;
+using System;
+using System.Threading.Tasks;
+using Volo.Abp;
+
+namespace LINGYUN.Abp.AI.Ollama;
+public class OllamaChatClientProvider : ChatClientProvider
+{
+ public const string DefaultEndpoint = "http://localhost:11434";
+
+ public const string ProviderName = "Ollama";
+ public override string Name => ProviderName;
+
+ public OllamaChatClientProvider(
+ IServiceProvider serviceProvider) : base(serviceProvider)
+ {
+ }
+
+ public async override Task<IChatClient> CreateAsync(WorkspaceDefinition workspace)
+ {
+ Check.NotNull(workspace, nameof(workspace));
+
+ var options = ServiceProvider.GetRequiredService<IOptions<AbpAIOptions>>().Value;
+
+ var ollamaApiClient = new OllamaApiClient(workspace.ApiBaseUrl ?? DefaultEndpoint);
+
+ var chatClientBuilder = ChatClientBuilderChatClientExtensions.AsBuilder(ollamaApiClient);
+
+ foreach (var handlerAction in options.ChatClientBuildActions)
+ {
+ await handlerAction(workspace, ServiceProvider, chatClientBuilder);
+ }
+
+ return chatClientBuilder
+ .UseLogging()
+ .UseOpenTelemetry()
+ .UseFunctionInvocation()
+ .UseDistributedCache()
+ .Build(ServiceProvider);
+ }
+
+ public override ChatModel[] GetModels()
+ {
+ return [];
+ }
+}
diff --git a/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaKernelProvider.cs b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaKernelProvider.cs
new file mode 100644
index 000000000..3cef63e1f
--- /dev/null
+++ b/aspnet-core/modules/ai/LINGYUN.Abp.AI.Ollama/LINGYUN/Abp/AI/Ollama/OllamaKernelProvider.cs
@@ -0,0 +1,39 @@
+using LINGYUN.Abp.AI.Workspaces;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Options;
+using Microsoft.SemanticKernel;
+using OllamaSharp;
+using System;
+using System.Threading.Tasks;
+using Volo.Abp;
+
+namespace LINGYUN.Abp.AI.Ollama;
+public class OllamaKernelProvider : KernelProvider
+{
+ public const string ProviderName = "Ollama";
+ public override string Name => ProviderName;
+
+ public OllamaKernelProvider(IServiceProvider serviceProvider) : base(serviceProvider)
+ {
+ }
+
+ public override Task<Kernel> CreateAsync(WorkspaceDefinition workspace)
+ {
+ Check.NotNull(workspace, nameof(workspace));
+
+ var options = ServiceProvider.GetRequiredService<IOptions<AbpAIOptions>>().Value;
+
+ var ollamaApiClient = new OllamaApiClient(workspace.ApiBaseUrl ?? OllamaChatClientProvider.DefaultEndpoint);
+
+ var kernelBuilder = Kernel.CreateBuilder()
+ .AddOllamaChatClient(ollamaApiClient);
+
+ foreach (var handlerAction in options.KernelBuildActions)
+ {
+ handlerAction(workspace, kernelBuilder);
+ }
+
+ return Task.FromResult(kernelBuilder.Build());
+ }
+}