add glm-5 from openrouter to llama-swap

This commit is contained in:
2026-03-08 17:58:01 +01:00
parent 4fda343b01
commit e72a79be8f
4 changed files with 40 additions and 0 deletions

View File

@@ -12,6 +12,13 @@ macros:
thinking_on: "--chat-template-kwargs '{\"enable_thinking\": true}'"
thinking_off: "--chat-template-kwargs '{\"enable_thinking\": false}'"
# Remote upstream endpoints llama-swap can route requests to in addition to
# locally-spawned model servers.
peers:
  openrouter:
    # Base URL of the OpenRouter OpenAI-compatible API.
    proxy: https://openrouter.ai/api
    # NOTE(review): presumably expanded from the process environment by
    # llama-swap's ${env.*} interpolation — confirm the key is set in the pod.
    apiKey: ${env.OPENROUTER_API_KEY}
    # Model IDs that are served through this peer rather than locally.
    models:
      - z-ai/glm-5
hooks:
on_startup:
preload:

View File

@@ -30,6 +30,12 @@ spec:
- containerPort: 8080
name: http
protocol: TCP
env:
  # OpenRouter API key for llama-swap's "openrouter" peer. Sourced from the
  # llama-openrouter Secret (presumably synced from Vault — verify against the
  # VaultStaticSecret of the same name).
  - name: OPENROUTER_API_KEY
    valueFrom:
      secretKeyRef:
        name: llama-openrouter
        key: OPENROUTER_API_KEY
volumeMounts:
- name: models
mountPath: /root/.cache

View File

@@ -36,3 +36,26 @@ spec:
excludeRaw: true
vaultAuthRef: llama
---
# Sync the OpenRouter API key out of Vault into a Kubernetes Secret named
# "llama-openrouter" (consumed by the llama-swap Deployment's env).
apiVersion: secrets.hashicorp.com/v1beta1
kind: VaultStaticSecret
metadata:
  name: llama-openrouter
  namespace: llama
spec:
  # KV version-2 secrets engine at mount "secret", path "openrouter".
  type: kv-v2
  mount: secret
  path: openrouter
  destination:
    # Create the target Secret if it does not already exist.
    create: true
    name: llama-openrouter
    type: Opaque
    transformation:
      # Emit only the templated keys below; drop the raw Vault payload.
      excludeRaw: true
      templates:
        # Rename the Vault field "API_KEY" to the env-var-friendly key name.
        OPENROUTER_API_KEY:
          text: '{{ get .Secrets "API_KEY" }}'
  # Reference to the VaultAuth resource used to authenticate this sync.
  vaultAuthRef: llama

View File

@@ -1,3 +1,7 @@
# Vault policy: read-only access to the llama stack's secrets.
# kv-v2 data reads go through the "secret/data/" prefix.
path "secret/data/ollama" {
  capabilities = ["read"]
}

# OpenRouter API key consumed by llama-swap via the llama-openrouter sync.
path "secret/data/openrouter" {
  capabilities = ["read"]
}