From 2c0c83cc7443470a3f6f29fbfc0dcc50e4de8005 Mon Sep 17 00:00:00 2001
From: Pavan Kumar <34813177+pavan-kumar-99@users.noreply.github.com>
Date: Sat, 8 Jun 2024 23:01:13 -0400
Subject: [PATCH] Update ollama-server.yaml

---
 .../home-cluster/ollama/ollama-server.yaml | 132 +++++++++---------
 1 file changed, 66 insertions(+), 66 deletions(-)

diff --git a/clusters/home-cluster/ollama/ollama-server.yaml b/clusters/home-cluster/ollama/ollama-server.yaml
index 65b844b..6ce47c1 100644
--- a/clusters/home-cluster/ollama/ollama-server.yaml
+++ b/clusters/home-cluster/ollama/ollama-server.yaml
@@ -1,66 +1,66 @@
-# apiVersion: source.toolkit.fluxcd.io/v1
-# kind: HelmRepository
-# metadata:
-#   name: ollama-helm
-#   namespace: flux-system
-# spec:
-#   interval: 1m
-#   url: https://helm.openwebui.com/
-# ---
-# apiVersion: helm.toolkit.fluxcd.io/v2
-# kind: HelmRelease
-# metadata:
-#   name: ollama-server
-#   namespace: ollama
-# spec:
-#   interval: 10m
-#   chart:
-#     spec:
-#       chart: open-webui
-#       version: '2.1.0'
-#       sourceRef:
-#         kind: HelmRepository
-#         name: ollama-helm
-#         namespace: flux-system
-#       interval: 1m
-#   values:
-#     ollamaUrls:
-#       - 'http://ollama-server:11434'
-#     resources:
-#       requests:
-#         cpu: "500m"
-#         memory: "1Gi"
-#       limits:
-#         cpu: "1000m"
-#         memory: "4Gi"
-#     ollama:
-#       # -- Automatically install Ollama Helm chart from https://otwld.github.io/ollama-helm/. Use [Helm Values](https://github.com/otwld/ollama-helm/#helm-values) to configure
-#       enabled: true
-#       image:
-#         tag: "0.1.42"
-#       resources:
-#         requests:
-#           cpu: "4000m"
-#           memory: "4Gi"
-#         limits:
-#           cpu: "6000m"
-#           memory: "6Gi"
-#       replicaCount: 1
-#       ollama:
-#         gpu:
-#           enabled: true
-#           type: 'nvidia'
-#           number: 1
-#         # models:
-#         #   - gemma
-#       autoscaling:
-#         enabled: false
-#         minReplicas: 3
-#         maxReplicas: 4
-#         targetCPUUtilizationPercentage: 50
-#       persistentVolume:
-#         enabled: true
-#         accessModes:
-#           - ReadWriteOnce
-#         size: 100Gi
-#         storageClass: "ceph-block-ssd"
+apiVersion: source.toolkit.fluxcd.io/v1
+kind: HelmRepository
+metadata:
+  name: ollama-helm
+  namespace: flux-system
+spec:
+  interval: 1m
+  url: https://helm.openwebui.com/
+---
+apiVersion: helm.toolkit.fluxcd.io/v2
+kind: HelmRelease
+metadata:
+  name: ollama-server
+  namespace: ollama
+spec:
+  interval: 10m
+  chart:
+    spec:
+      chart: open-webui
+      version: '2.1.0'
+      sourceRef:
+        kind: HelmRepository
+        name: ollama-helm
+        namespace: flux-system
+      interval: 1m
+  values:
+    ollamaUrls:
+      - 'http://ollama-server:11434'
+    resources:
+      requests:
+        cpu: "500m"
+        memory: "1Gi"
+      limits:
+        cpu: "1000m"
+        memory: "4Gi"
+    ollama:
+      # -- Automatically install Ollama Helm chart from https://otwld.github.io/ollama-helm/. Use [Helm Values](https://github.com/otwld/ollama-helm/#helm-values) to configure
+      enabled: true
+      image:
+        tag: "0.1.42"
+      resources:
+        requests:
+          cpu: "4000m"
+          memory: "4Gi"
+        limits:
+          cpu: "6000m"
+          memory: "6Gi"
+      replicaCount: 1
+      ollama:
+        gpu:
+          enabled: true
+          type: 'nvidia'
+          number: 1
+        # models:
+        #   - gemma
+      autoscaling:
+        enabled: false
+        minReplicas: 3
+        maxReplicas: 4
+        targetCPUUtilizationPercentage: 50
+      persistentVolume:
+        enabled: true
+        accessModes:
+          - ReadWriteOnce
+        size: 100Gi
+        storageClass: "ceph-block-ssd"