---
apiVersion: source.toolkit.fluxcd.io/v1
kind: HelmRepository
metadata:
  name: bat-librechat
  namespace: librechat
spec:
  interval: 24h
  url: https://charts.blue-atlas.de
---
apiVersion: helm.toolkit.fluxcd.io/v2
kind: HelmRelease
metadata:
  name: librechat
  namespace: librechat
spec:
  interval: 30m
  chart:
    spec:
      chart: librechat
      # Quoted so the chart version is always read as a string.
      version: "1.8.10"
      sourceRef:
        kind: HelmRepository
        name: bat-librechat
  values:
    global:
      librechat:
        existingSecretName: librechat
    librechat:
      configEnv:
        PLUGIN_MODELS: null
        ALLOW_REGISTRATION: "false"
        TRUST_PROXY: "1"
        DOMAIN_CLIENT: https://librechat.lumpiasty.xyz
        SEARCH: "true"
      existingSecretName: librechat
      # Embedded librechat.yaml — passed verbatim to the app as a block scalar.
      configYamlContent: |
        version: 1.0.3
        endpoints:
          custom:
            - name: "Ollama"
              apiKey: "ollama"
              baseURL: "http://ollama.ollama.svc.cluster.local:11434/v1/chat/completions"
              models:
                default:
                  [
                    "llama2",
                    "mistral",
                    "codellama",
                    "dolphin-mixtral",
                    "mistral-openorca",
                  ]
                # fetching list of models is supported but the `name` field must start
                # with `ollama` (case-insensitive), as it does in this example.
                fetch: true
              titleConvo: true
              titleModel: "current_model"
              summarize: false
              summaryModel: "current_model"
              forcePrompt: false
              modelDisplayLabel: "Ollama"
            - name: "Llama.cpp"
              apiKey: "llama"
              baseURL: "http://llama.llama.svc.cluster.local:11434/v1/chat/completions"
              models:
                default:
                  [
                    "DeepSeek-R1-0528-Qwen3-8B-GGUF",
                    "Qwen3-8B-GGUF",
                    "Qwen3-8B-GGUF-Q6_K",
                  ]
              titleConvo: true
              titleModel: "current_model"
              summarize: false
              summaryModel: "current_model"
              forcePrompt: false
              modelDisplayLabel: "Llama.cpp"
    imageVolume:
      enabled: true
      size: 10G
      accessModes: ReadWriteOnce
      storageClassName: mayastor-single-hdd
    ingress:
      enabled: true
      className: nginx
      annotations:
        cert-manager.io/cluster-issuer: letsencrypt
      hosts:
        - host: librechat.lumpiasty.xyz
          paths:
            - path: /
              pathType: ImplementationSpecific
      tls:
        - hosts:
            - librechat.lumpiasty.xyz
          secretName: librechat-ingress
    mongodb:
      persistence:
        storageClass: mayastor-single-hdd
    meilisearch:
      persistence:
        storageClass: mayastor-single-hdd
      auth:
        existingMasterKeySecret: librechat