Skip to content

Commit

Permalink
Readme done
Browse files Browse the repository at this point in the history
  • Loading branch information
BLaZeKiLL committed Dec 10, 2023
1 parent 09a0755 commit 2d489d2
Show file tree
Hide file tree
Showing 9 changed files with 312 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .idea/.idea.Codeblaze.SemanticKernel/.idea/vcs.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public static class OllamaKernelBuilderExtensions
/// <param name="baseUrl">Ollama base url</param>
/// <param name="serviceId"></param>
/// <returns></returns>
public static IKernelBuilder WithOllamaTextGenerationService(
public static IKernelBuilder AddOllamaTextGeneration(
this IKernelBuilder builder,
string modelId,
string baseUrl,
Expand All @@ -45,7 +45,7 @@ public static IKernelBuilder WithOllamaTextGenerationService(
/// <param name="baseUrl">Ollama base url</param>
/// <param name="serviceId"></param>
/// <returns></returns>
public static IKernelBuilder WithOllamaTextGenerationService(
public static IKernelBuilder AddOllamaTextGeneration(
this IKernelBuilder builder,
string modelId,
Uri baseUrl,
Expand All @@ -72,7 +72,7 @@ public static IKernelBuilder WithOllamaTextGenerationService(
/// <param name="baseUrl">Ollama base url</param>
/// <param name="serviceId"></param>
/// <returns></returns>
public static IKernelBuilder WithOllamaChatCompletionService(
public static IKernelBuilder AddOllamaChatCompletion(
this IKernelBuilder builder,
string modelId,
string baseUrl,
Expand All @@ -99,7 +99,7 @@ public static IKernelBuilder WithOllamaChatCompletionService(
/// <param name="baseUrl">Ollama base url</param>
/// <param name="serviceId"></param>
/// <returns></returns>
public static IKernelBuilder WithOllamaChatCompletionService(
public static IKernelBuilder AddOllamaChatCompletion(
this IKernelBuilder builder,
string modelId,
Uri baseUrl,
Expand All @@ -126,7 +126,7 @@ public static IKernelBuilder WithOllamaChatCompletionService(
/// <param name="baseUrl">Ollama base url</param>
/// <param name="serviceId"></param>
/// <returns></returns>
public static IKernelBuilder WithOllamaTextEmbeddingGeneration(
public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
this IKernelBuilder builder,
string modelId,
string baseUrl,
Expand Down Expand Up @@ -154,7 +154,7 @@ public static IKernelBuilder WithOllamaTextEmbeddingGeneration(
/// <param name="baseUrl">Ollama base url</param>
/// <param name="serviceId"></param>
/// <returns></returns>
public static IKernelBuilder WithOllamaTextEmbeddingGeneration(
public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
this IKernelBuilder builder,
string modelId,
Uri baseUrl,
Expand Down
110 changes: 109 additions & 1 deletion Codeblaze.SemanticKernel.Connectors.AI.Ollama/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,112 @@
Supports
- text generation
- chat completion
- embedding generation
- embedding generation

> :warning: **Embedding generation**: is experimental in Semantic Kernel.
### Quick Start
- Text Generation

Configure the kernel
```cs
var builder = new KernelBuilder();

// provide the HTTP client used to interact with Ollama API
builder.Services.AddTransient<HttpClient>();

builder.AddOllamaTextGeneration(
config["Ollama:Model"], // Ollama model Id
config["Ollama:BaseUrlGeneration"] // Ollama endpoint
);

var kernel = builder.Build();
```

Usage
```cs
const string prompt = """
Bot: How can I help you?
User: {{$input}}
---------------------------------------------
The intent of the user in 5 words or less:
""";

var result = await kernel.InvokePromptAsync(prompt, new KernelArguments
{
{"input", input}
});

System.Console.WriteLine(result.GetValue<string>());
```

- Chat Completion

Configure the kernel
```cs
var builder = new KernelBuilder();

// provide the HTTP client used to interact with Ollama API
builder.Services.AddTransient<HttpClient>();

builder.AddOllamaChatCompletion(
config["Ollama:Model"], // Ollama model Id
config["Ollama:BaseUrlGeneration"] // Ollama endpoint
);

var kernel = builder.Build();
```

Usage
```cs
var chat = kernel.GetRequiredService<IChatCompletionService>();

var history = new ChatHistory();

// add messages to current chat history as required
history.AddSystemMessage("...");
history.AddAssistantMessage("...");
history.AddUserMessage(input);

// result is a list of all chat messages
// including the output of current prompt
var result = await chat.GetChatMessageContentsAsync(history);

// Print the last message
System.Console.WriteLine(result[^1].Content);
```

- Embedding Generation (Experimental)

Configure the kernel
```cs
var builder = new KernelBuilder();

// provide the HTTP client used to interact with Ollama API
builder.Services.AddTransient<HttpClient>();

builder.AddOllamaTextEmbeddingGeneration(
config["Ollama:Model"], // Ollama model Id
config["Ollama:BaseUrlGeneration"] // Ollama endpoint
);

// Configure memory backend (e.g Azure Cognitive Search)
var kernel = builder.Build();
```

Usage
```cs
var memory = kernel.GetRequiredService<ISemanticTextMemory>();

// This will internally call Ollama embedding service to generate embeddings
await memory.SaveReferenceAsync(
collection: "collection",
externalSourceName: "ext-collection",
externalId: id, // reference id (database entity id)
description: input,
text: input
);
```
1 change: 1 addition & 0 deletions Codeblaze.SemanticKernel.Console/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
appsettings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
<Project Sdk="Microsoft.NET.Sdk">

<!-- Console executable on .NET 8; Linux is the default OS for Docker builds. -->
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
</PropertyGroup>

<!-- Surface the repo-root .dockerignore in this project for container tooling. -->
<ItemGroup>
<Content Include="..\.dockerignore">
<Link>.dockerignore</Link>
</Content>
</ItemGroup>

<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="8.0.0" />
<PackageReference Include="Spectre.Console" Version="0.48.0" />
<PackageReference Include="Spectre.Console.Json" Version="0.48.0" />
</ItemGroup>

<!-- Copy JSON config (e.g. appsettings.json) next to the binary on build and publish. -->
<ItemGroup>
<None Include="*.json"
CopyToOutputDirectory="Always"
CopyToPublishDirectory="Always"
/>
</ItemGroup>

<!-- Ollama connector providing the AddOllama* kernel builder extensions. -->
<ItemGroup>
<ProjectReference Include="..\Codeblaze.SemanticKernel.Connectors.AI.Ollama\Codeblaze.SemanticKernel.Connectors.AI.Ollama.csproj" />
</ItemGroup>

</Project>
21 changes: 21 additions & 0 deletions Codeblaze.SemanticKernel.Console/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# Multi-stage build: slim runtime image for the final stage, full SDK only for build/publish.

# Base stage: .NET 8 runtime only; runs as the non-root user provided by the image.
FROM mcr.microsoft.com/dotnet/runtime:8.0 AS base
USER $APP_UID
WORKDIR /app

# Build stage: restore first (own layer, cached while the csproj is unchanged), then compile.
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
ARG BUILD_CONFIGURATION=Release
WORKDIR /src
COPY ["Codeblaze.SemanticKernel.Console/Codeblaze.SemanticKernel.Console.csproj", "Codeblaze.SemanticKernel.Console/"]
RUN dotnet restore "Codeblaze.SemanticKernel.Console/Codeblaze.SemanticKernel.Console.csproj"
COPY . .
WORKDIR "/src/Codeblaze.SemanticKernel.Console"
RUN dotnet build "Codeblaze.SemanticKernel.Console.csproj" -c $BUILD_CONFIGURATION -o /app/build

# Publish stage: framework-dependent output (no native apphost; launched via `dotnet`).
FROM build AS publish
ARG BUILD_CONFIGURATION=Release
RUN dotnet publish "Codeblaze.SemanticKernel.Console.csproj" -c $BUILD_CONFIGURATION -o /app/publish /p:UseAppHost=false

# Final stage: copy published output onto the runtime-only base image.
FROM base AS final
WORKDIR /app
COPY --from=publish /app/publish .
ENTRYPOINT ["dotnet", "Codeblaze.SemanticKernel.Console.dll"]
74 changes: 74 additions & 0 deletions Codeblaze.SemanticKernel.Console/Program.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
using Codeblaze.SemanticKernel.Console.Services;
using Microsoft.Extensions.Configuration;
using Spectre.Console;

// Load app settings (display name, Ollama model id and base URL).
var config = new ConfigurationBuilder()
    .AddJsonFile("appsettings.json")
    .Build();

AnsiConsole.Write(new FigletText($"{config["Name"]!}").Color(Color.Green));
AnsiConsole.WriteLine("");

// Initialize the kernel inside the spinner and return it from Start<T>.
// This removes the null-initialized local and fixes the compiler warnings
// (CS8600 null-to-non-nullable assignment, CS8602 possible null dereference).
var kernel = AnsiConsole.Status().Start("Initializing...", ctx =>
{
    ctx.Spinner(Spinner.Known.Star);
    ctx.SpinnerStyle(Style.Parse("green"));
    var service = new KernelService(config);
    ctx.Status("Initialized");
    return service;
});

const string prompt = "1.\tPrompt kernel";
const string exit = "2.\tExit";

Run();

return;

// Main menu loop; returns (exiting the app) when the user selects Exit.
void Run()
{
    while (true)
    {
        var option = AnsiConsole.Prompt(
            new SelectionPrompt<string>()
                .Title("Select an option")
                .PageSize(10)
                .MoreChoicesText("[grey](Move up and down to reveal more options)[/]")
                .AddChoices(prompt, exit)
        );

        switch (option)
        {
            case prompt:
                Prompt().GetAwaiter().GetResult();
                break;
            case exit:
                return;
        }
    }
}

// Reads one line of user input, runs it through the kernel and renders the response.
async Task Prompt()
{
    // Named `input` (not `prompt`) to avoid shadowing the top-level const.
    var input = AnsiConsole.Prompt(new TextPrompt<string>("What are you looking to do today ?").PromptStyle("teal"));

    var result = string.Empty;

    await AnsiConsole.Status().StartAsync("Processing...", async ctx =>
    {
        ctx.Spinner(Spinner.Known.Star);
        ctx.SpinnerStyle(Style.Parse("green"));
        ctx.Status("Processing input to generate Chat Response");
        result = await kernel.BasicPrompt(input);
    });

    AnsiConsole.WriteLine("");
    AnsiConsole.Write(new Rule("[silver]AI Assistant Response[/]") { Justification = Justify.Center });
    AnsiConsole.WriteLine(result);
    AnsiConsole.Write(new Rule("[yellow]****[/]") { Justification = Justify.Center });
    AnsiConsole.WriteLine("");
}
59 changes: 59 additions & 0 deletions Codeblaze.SemanticKernel.Console/Services/KernelService.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
using Codeblaze.SemanticKernel.Connectors.AI.Ollama;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.AI.ChatCompletion;
using Microsoft.SemanticKernel.Memory;

namespace Codeblaze.SemanticKernel.Console.Services;

/// <summary>
/// Wraps a Semantic Kernel instance configured for Ollama text generation
/// and exposes simple prompt / chat helpers for the console app.
/// </summary>
public class KernelService
{
    private readonly Kernel _Kernel;

    /// <summary>
    /// Builds the kernel from configuration.
    /// Expects <c>Ollama:Model</c> and <c>Ollama:BaseUrlGeneration</c> to be set.
    /// </summary>
    /// <exception cref="InvalidOperationException">A required configuration value is missing.</exception>
    public KernelService(IConfiguration config)
    {
        // Fail fast with a clear message instead of passing possibly-null values
        // into the connector (fixes the CS8604 possible-null-argument warnings).
        var modelId = config["Ollama:Model"]
            ?? throw new InvalidOperationException("Missing configuration value 'Ollama:Model'.");
        var baseUrl = config["Ollama:BaseUrlGeneration"]
            ?? throw new InvalidOperationException("Missing configuration value 'Ollama:BaseUrlGeneration'.");

        var builder = new KernelBuilder();

        // HttpClient used by the Ollama connector to call the Ollama REST API.
        builder.Services.AddTransient<HttpClient>();

        builder.AddOllamaTextGeneration(modelId, baseUrl);

        _Kernel = builder.Build();
    }

    /// <summary>
    /// Runs a fixed intent-extraction prompt over <paramref name="input"/>.
    /// </summary>
    /// <returns>The model's response, or an empty string if the kernel produced no value.</returns>
    public async Task<string> BasicPrompt(string input)
    {
        const string prompt = """
        Bot: How can I help you?
        User: {{$input}}

        ---------------------------------------------

        The intent of the user in 5 words or less:
        """;

        var result = await _Kernel.InvokePromptAsync(prompt, new KernelArguments
        {
            {"input", input}
        });

        // Coalesce so the declared non-nullable return type holds
        // (fixes the CS8603 possible-null-return warning).
        return result.GetValue<string>() ?? string.Empty;
    }

    /// <summary>
    /// Sends <paramref name="input"/> through the chat completion service with a
    /// placeholder system/assistant history and returns the last message's content.
    /// </summary>
    public async Task<string> BasicChat(string input)
    {
        var chat = _Kernel.GetRequiredService<IChatCompletionService>();

        var history = new ChatHistory();

        history.AddSystemMessage("...");
        history.AddAssistantMessage("...");
        history.AddUserMessage(input);

        var result = await chat.GetChatMessageContentsAsync(history);

        // Last entry is the model's reply to the current prompt.
        return result[^1].Content ?? string.Empty;
    }
}
6 changes: 6 additions & 0 deletions Codeblaze.SemanticKernel.sln
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Codeblaze.SemanticKernel.Co
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Codeblaze.SemanticKernel.Api", "Codeblaze.SemanticKernel.Api\Codeblaze.SemanticKernel.Api.csproj", "{51E5DA79-8F4E-4D8D-95ED-96C724741E6F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Codeblaze.SemanticKernel.Console", "Codeblaze.SemanticKernel.Console\Codeblaze.SemanticKernel.Console.csproj", "{AC32C115-684E-408C-9320-8D915FF7E3A1}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Expand All @@ -18,5 +20,9 @@ Global
{51E5DA79-8F4E-4D8D-95ED-96C724741E6F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{51E5DA79-8F4E-4D8D-95ED-96C724741E6F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{51E5DA79-8F4E-4D8D-95ED-96C724741E6F}.Release|Any CPU.Build.0 = Release|Any CPU
{AC32C115-684E-408C-9320-8D915FF7E3A1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{AC32C115-684E-408C-9320-8D915FF7E3A1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AC32C115-684E-408C-9320-8D915FF7E3A1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AC32C115-684E-408C-9320-8D915FF7E3A1}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal

0 comments on commit 2d489d2

Please sign in to comment.