Skip to content

Commit

Permalink
Merge pull request #34 from CommunityToolkit/aaronpowell/issue32
Browse files Browse the repository at this point in the history
Supporting multiple models in Ollama
  • Loading branch information
aaronpowell authored Sep 27, 2024
2 parents 0570ad3 + 4c4c648 commit 60c72e4
Show file tree
Hide file tree
Showing 7 changed files with 249 additions and 102 deletions.
10 changes: 4 additions & 6 deletions docs/integrations/hosting-ollama.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,21 +13,20 @@ Use the static `AddOllama` method to add this container component to the applica
```csharp
// The distributed application builder is created here
var ollama = builder.AddOllama("ollama").AddModel("llama3");

// The builder is used to build and run the app somewhere down here
```

### Configuration

The AddOllama method has optional arguments to set the `name` and `port`.
The `name` is what gets displayed in the Aspire orchestration app against this component.
The `port` is provided randomly by Aspire. If for whatever reason you need a fixed port, you can set that here.
Use the `AddModel` extension method to specify which LLM(s) to pull when the container starts up. If no models are added, you are left with a plain Ollama container to work with.

## Downloading the LLM

When the Ollama container for this component first spins up, this component will download the LLM(s) added via `AddModel`.
The progress of this download will be displayed in the State column for this component on the Aspire orchestration app.
Important: Keep the Aspire orchestration app open until the download is complete, otherwise the download will be cancelled.
In the spirit of productivity, we recommend kicking off this process before heading for lunch.
Expand All @@ -45,8 +44,7 @@ Within that component (e.g. a web app), you can fetch the Ollama connection stri
Note that if you changed the name of the Ollama component via the `name` argument, then you'll need to use that here when specifying which connection string to get.

```csharp
var connectionString = builder.Configuration.GetConnectionString("ollama");
```

You can then call any of the Ollama endpoints through this connection string. We recommend using the [OllamaSharp](https://www.nuget.org/packages/OllamaSharp) client to do this.

Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
var builder = DistributedApplication.CreateBuilder(args);

var ollama = builder.AddOllama("ollama", modelName: "phi3");
var ollama = builder.AddOllama("ollama", port: null)
.AddModel("phi3")
.WithDefaultModel("phi3");

builder.AddProject<Projects.Aspire_CommunityToolkit_Hosting_Ollama_Web>("webfrontend")
.WithExternalHttpEndpoints()
Expand Down
49 changes: 46 additions & 3 deletions src/Aspire.CommunityToolkit.Hosting.Ollama/OllamaResource.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,25 @@
/// </remarks>
/// <param name="name">The name for the resource.</param>
/// <param name="modelName">The LLM to download on initial startup.</param>
public class OllamaResource(string name, string modelName) : ContainerResource(name), IResourceWithConnectionString
public class OllamaResource(string name) : ContainerResource(name), IResourceWithConnectionString
{
internal const string OllamaEndpointName = "ollama";

private readonly List<string> _models = [];

private string? _defaultModel = null;

private EndpointReference? _endpointReference;

public string ModelName { get; internal set; } = modelName;
/// <summary>
/// The list of models to download on initial startup.
/// </summary>
public IReadOnlyList<string> Models => _models;

/// <summary>
/// The default model to be configured on the Ollama server.
/// </summary>
public string? DefaultModel => _defaultModel;

/// <summary>
/// Gets the endpoint for the Ollama server.
Expand All @@ -28,4 +40,35 @@ public class OllamaResource(string name, string modelName) : ContainerResource(n
ReferenceExpression.Create(
$"http://{Endpoint.Property(EndpointProperty.Host)}:{Endpoint.Property(EndpointProperty.Port)}"
);
}

/// <summary>
/// Adds a model to the list of models to download on initial startup.
/// </summary>
/// <param name="modelName">The name of the model.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="modelName"/> is null, empty, or whitespace.</exception>
public void AddModel(string modelName)
{
    // ThrowIfNullOrEmpty/ThrowIfNullOrWhiteSpace are declared on ArgumentException,
    // not ArgumentNullException; the original call did not compile. WhiteSpace is used
    // to match the validation in the OllamaResourceBuilderExtensions methods.
    ArgumentException.ThrowIfNullOrWhiteSpace(modelName, nameof(modelName));

    // Keep the list duplicate-free so the same model is not pulled twice.
    if (!_models.Contains(modelName))
    {
        _models.Add(modelName);
    }
}

/// <summary>
/// Sets the default model to be configured on the Ollama server.
/// </summary>
/// <param name="modelName">The name of the model.</param>
/// <remarks>
/// If the model does not exist in the list of models, it will be added.
/// </remarks>
/// <exception cref="ArgumentException">Thrown when <paramref name="modelName"/> is null, empty, or whitespace.</exception>
public void SetDefaultModel(string modelName)
{
    // ThrowIfNullOrWhiteSpace is declared on ArgumentException, not
    // ArgumentNullException; the original call did not compile.
    ArgumentException.ThrowIfNullOrWhiteSpace(modelName, nameof(modelName));

    _defaultModel = modelName;

    // AddModel already ignores duplicates, so no Contains pre-check is needed here.
    AddModel(modelName);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,41 +10,86 @@ namespace Aspire.Hosting;
/// </summary>
public static class OllamaResourceBuilderExtensions
{
/// <summary>
/// Adds the Ollama container to the application model.
/// </summary>
/// <param name="builder">The <see cref="IDistributedApplicationBuilder"/>.</param>
/// <param name="name">The name of the resource. This name will be used as the connection string name when referenced in a dependency.</param>
/// <param name="port">An optional fixed port to bind to the Ollama container. This will be provided randomly by Aspire if not set.</param>
/// <param name="modelName">The name of the LLM to download on initial startup. llama3 by default. This can be set to null to not download any models.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
public static IResourceBuilder<OllamaResource> AddOllama(this IDistributedApplicationBuilder builder,
string name = "Ollama", int? port = null, string modelName = "llama3")
{
ArgumentNullException.ThrowIfNull(builder, nameof(builder));
ArgumentNullException.ThrowIfNull(name, nameof(name));

builder.Services.TryAddLifecycleHook<OllamaResourceLifecycleHook>();
var resource = new OllamaResource(name, modelName);
return builder.AddResource(resource)
.WithAnnotation(new ContainerImageAnnotation { Image = OllamaContainerImageTags.Image, Tag = OllamaContainerImageTags.Tag, Registry = OllamaContainerImageTags.Registry })
.WithHttpEndpoint(port: port, targetPort: 11434, name: OllamaResource.OllamaEndpointName)
.ExcludeFromManifest();
}

/// <summary>
/// Adds a data volume to the Ollama container.
/// </summary>
/// <param name="builder">The <see cref="IResourceBuilder{T}"/>.</param>
/// <param name="name">The name of the volume. Defaults to an auto-generated name based on the application and resource names.</param>
/// <param name="isReadOnly">A flag that indicates if this is a read-only volume.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
public static IResourceBuilder<OllamaResource> WithDataVolume(this IResourceBuilder<OllamaResource> builder, string? name = null, bool isReadOnly = false)
{
ArgumentNullException.ThrowIfNull(builder, nameof(builder));
/// <summary>
/// Adds the Ollama container to the application model.
/// </summary>
/// <param name="builder">The <see cref="IDistributedApplicationBuilder"/>.</param>
/// <param name="name">The name of the resource. This name will be used as the connection string name when referenced in a dependency.</param>
/// <param name="port">An optional fixed port to bind to the Ollama container. This will be provided randomly by Aspire if not set.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
public static IResourceBuilder<OllamaResource> AddOllama(this IDistributedApplicationBuilder builder, string name, int? port = null)
{
    ArgumentNullException.ThrowIfNull(builder, nameof(builder));
    ArgumentNullException.ThrowIfNull(name, nameof(name));

    // Ensure the lifecycle hook that pulls the configured models is registered once.
    builder.Services.TryAddLifecycleHook<OllamaResourceLifecycleHook>();

    var imageAnnotation = new ContainerImageAnnotation
    {
        Image = OllamaContainerImageTags.Image,
        Tag = OllamaContainerImageTags.Tag,
        Registry = OllamaContainerImageTags.Registry,
    };

    var ollama = builder.AddResource(new OllamaResource(name))
        .WithAnnotation(imageAnnotation)
        .WithHttpEndpoint(port: port, targetPort: 11434, name: OllamaResource.OllamaEndpointName);

    return ollama.ExcludeFromManifest();
}

/// <summary>
/// Adds the Ollama container to the application model.
/// </summary>
/// <param name="builder">The <see cref="IDistributedApplicationBuilder"/>.</param>
/// <param name="name">The name of the resource. This name will be used as the connection string name when referenced in a dependency.</param>
/// <param name="port">An optional fixed port to bind to the Ollama container. This will be provided randomly by Aspire if not set.</param>
/// <param name="modelName">The name of the LLM to download on initial startup. llama3 by default. This can be set to null to not download any models.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
/// <remarks>This is to maintain compatibility with the Raygun.Aspire.Hosting.Ollama package and will be removed in the next major release.</remarks>
[Obsolete("Use AddOllama without a model name, and then the AddModel extension method to add models.")]
public static IResourceBuilder<OllamaResource> AddOllama(this IDistributedApplicationBuilder builder,
    string name = "Ollama", int? port = null, string? modelName = "llama3")
{
    var resource = builder.AddOllama(name, port);

    // The documented contract allows a null model name to mean "pull no models";
    // forwarding null into AddModel would throw ArgumentException, so skip it instead.
    return string.IsNullOrWhiteSpace(modelName) ? resource : resource.AddModel(modelName);
}

/// <summary>
/// Adds a data volume to the Ollama container.
/// </summary>
/// <param name="builder">The <see cref="IResourceBuilder{T}"/>.</param>
/// <param name="name">The name of the volume. Defaults to an auto-generated name based on the application and resource names.</param>
/// <param name="isReadOnly">A flag that indicates if this is a read-only volume.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
public static IResourceBuilder<OllamaResource> WithDataVolume(this IResourceBuilder<OllamaResource> builder, string? name = null, bool isReadOnly = false)
{
    ArgumentNullException.ThrowIfNull(builder, nameof(builder));

    // /root/.ollama is where the Ollama container persists its downloaded models.
    // (The duplicated, unreachable second return statement has been removed.)
#pragma warning disable CTASPIRE001
    return builder.WithVolume(name ?? VolumeNameGenerator.CreateVolumeName(builder, "ollama"), "/root/.ollama", isReadOnly);
#pragma warning restore CTASPIRE001
}
}

/// <summary>
/// Adds a model to the Ollama container, to be downloaded on initial startup.
/// </summary>
/// <param name="builder">The <see cref="IResourceBuilder{T}"/> for the <see cref="OllamaResource"/>.</param>
/// <param name="modelName">The name of the LLM to download on initial startup.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="modelName"/> is null, empty, or whitespace.</exception>
public static IResourceBuilder<OllamaResource> AddModel(this IResourceBuilder<OllamaResource> builder, string modelName)
{
    ArgumentNullException.ThrowIfNull(builder, nameof(builder));
    ArgumentException.ThrowIfNullOrWhiteSpace(modelName, nameof(modelName));

    builder.Resource.AddModel(modelName);
    return builder;
}

/// <summary>
/// Sets the default model to be configured on the Ollama server.
/// </summary>
/// <param name="builder">The <see cref="IResourceBuilder{T}"/> for the <see cref="OllamaResource"/>.</param>
/// <param name="modelName">The name of the model.</param>
/// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="modelName"/> is null, empty, or whitespace.</exception>
public static IResourceBuilder<OllamaResource> WithDefaultModel(this IResourceBuilder<OllamaResource> builder, string modelName)
{
    ArgumentNullException.ThrowIfNull(builder, nameof(builder));
    ArgumentException.ThrowIfNullOrWhiteSpace(modelName, nameof(modelName));

    builder.Resource.SetDefaultModel(modelName);
    return builder;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -33,52 +33,50 @@ public Task AfterResourcesCreatedAsync(DistributedApplicationModel appModel, Can

private void DownloadModel(OllamaResource resource, CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(resource.ModelName))
{
return;
}

var logger = loggerService.GetLogger(resource);

_ = Task.Run(async () =>
{
try
foreach (string model in resource.Models)
{
var connectionString = await resource.ConnectionStringExpression.GetValueAsync(cancellationToken).ConfigureAwait(false);
if (string.IsNullOrWhiteSpace(connectionString))
try
{
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot("No connection string", KnownResourceStateStyles.Error) });
return;
}
var connectionString = await resource.ConnectionStringExpression.GetValueAsync(cancellationToken).ConfigureAwait(false);
var ollamaClient = new OllamaApiClient(new Uri(connectionString));
var model = resource.ModelName;
if (string.IsNullOrWhiteSpace(connectionString))
{
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot("No connection string", KnownResourceStateStyles.Error) });
return;
}
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot("Checking model", KnownResourceStateStyles.Info) });
var hasModel = await HasModelAsync(ollamaClient, model, cancellationToken);
var ollamaClient = new OllamaApiClient(new Uri(connectionString));
if (!hasModel)
{
logger.LogInformation("{TimeStamp}: [{Model}] needs to be downloaded for {ResourceName}",
DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffZ", CultureInfo.InvariantCulture),
resource.ModelName,
resource.Name);
await PullModel(resource, ollamaClient, model, logger, cancellationToken);
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot($"Checking {model}", KnownResourceStateStyles.Info) });
var hasModel = await HasModelAsync(ollamaClient, model, cancellationToken);
if (!hasModel)
{
logger.LogInformation("{TimeStamp}: [{Model}] needs to be downloaded for {ResourceName}",
DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffZ", CultureInfo.InvariantCulture),
model,
resource.Name);
await PullModel(resource, ollamaClient, model, logger, cancellationToken);
}
else
{
logger.LogInformation("{TimeStamp}: [{Model}] already exists for {ResourceName}",
DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffZ", CultureInfo.InvariantCulture),
model,
resource.Name);
}
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot("Running", KnownResourceStateStyles.Success) });
}
else
catch (Exception ex)
{
logger.LogInformation("{TimeStamp}: [{Model}] already exists for {ResourceName}",
DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffZ", CultureInfo.InvariantCulture),
resource.ModelName,
resource.Name);
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot(ex.Message, KnownResourceStateStyles.Error) });
break;
}
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot("Running", KnownResourceStateStyles.Success) });
}
catch (Exception ex)
{
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot(ex.Message, KnownResourceStateStyles.Error) });
}
}, cancellationToken).ConfigureAwait(false);
Expand Down Expand Up @@ -110,7 +108,7 @@ private async Task PullModel(OllamaResource resource, OllamaApiClient ollamaClie
logger.LogInformation("{TimeStamp}: Pulling ollama model {Model}...",
DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffZ", CultureInfo.InvariantCulture),
model);
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot("Downloading model", KnownResourceStateStyles.Info) });
await _notificationService.PublishUpdateAsync(resource, state => state with { State = new ResourceStateSnapshot($"Downloading {model}", KnownResourceStateStyles.Info) });

long percentage = 0;

Expand All @@ -128,7 +126,7 @@ private async Task PullModel(OllamaResource resource, OllamaApiClient ollamaClie
{
percentage = newPercentage;

var percentageState = percentage == 0 ? "Downloading model" : $"Downloading model {percentage} percent";
var percentageState = $"Downloading {model}{(percentage > 0 ? $" {percentage} percent" : "")}";
await _notificationService.PublishUpdateAsync(resource,
state => state with
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,25 +7,12 @@ namespace Aspire.CommunityToolkit.Hosting.Java.Tests;
#pragma warning disable CTASPIRE001
public class JavaHostingComponentTests(AspireIntegrationTestFixture<Projects.Aspire_CommunityToolkit_Hosting_Java_AppHost> fixture) : IClassFixture<AspireIntegrationTestFixture<Projects.Aspire_CommunityToolkit_Hosting_Java_AppHost>>
{
[ConditionalFact]
[ConditionalTheory]
[OSSkipCondition(OperatingSystems.Windows)]
public async Task ContainerAppResourceWillRespondWithOk()
[InlineData("containerapp")]
[InlineData("executableapp")]
public async Task AppResourceWillRespondWithOk(string resourceName)
{
var resourceName = "containerapp";
var httpClient = fixture.CreateHttpClient(resourceName);

await fixture.App.WaitForTextAsync("Started SpringMavenApplication", resourceName).WaitAsync(TimeSpan.FromMinutes(5));

var response = await httpClient.GetAsync("/");

response.StatusCode.Should().Be(HttpStatusCode.OK);
}

[ConditionalFact]
[OSSkipCondition(OperatingSystems.Windows)]
public async Task ExecutableAppResourceWillRespondWithOk()
{
var resourceName = "executableapp";
var httpClient = fixture.CreateHttpClient(resourceName);

await fixture.App.WaitForTextAsync("Started SpringMavenApplication", resourceName).WaitAsync(TimeSpan.FromMinutes(5));
Expand Down
Loading

0 comments on commit 60c72e4

Please sign in to comment.