update garm to main branch

This commit is contained in:
2026-03-14 02:37:44 +01:00
parent c55c37f0ac
commit 98e41dfc11
11 changed files with 607 additions and 17 deletions

View File

@@ -1,3 +1,7 @@
SHELL := /usr/bin/env bash
.PHONY: install-router gen-talos-config apply-talos-config get-kubeconfig garm-image-build garm-image-push garm-image-build-push
install-router: install-router:
ansible-playbook ansible/playbook.yml -i ansible/hosts ansible-playbook ansible/playbook.yml -i ansible/hosts
@@ -23,3 +27,19 @@ apply-talos-config:
get-kubeconfig: get-kubeconfig:
talosctl -n anapistula-delrosalae kubeconfig talos/generated/kubeconfig talosctl -n anapistula-delrosalae kubeconfig talos/generated/kubeconfig
garm-image-build:
set -euo pipefail; \
source apps/garm/image-source.env; \
docker build \
-f docker/garm/Dockerfile \
--build-arg GARM_COMMIT=$$GARM_COMMIT \
-t $$GARM_IMAGE \
.
garm-image-push:
set -euo pipefail; \
source apps/garm/image-source.env; \
docker push $$GARM_IMAGE
garm-image-build-push: garm-image-build garm-image-push

49
apps/garm/README.md Normal file
View File

@@ -0,0 +1,49 @@
# garm
This app deploys `garm` together with the external `garm-provider-k8s` provider plugin.
- API/UI ingress: `https://garm.lumpiasty.xyz`
- Internal service DNS: `http://garm.garm.svc.cluster.local:9997`
## Vault secret requirements
`VaultStaticSecret` reads `secret/data/garm` and expects at least:
- `jwt_auth_secret`
- `database_passphrase` (must be 32 characters)
## Connect garm to Gitea
After Flux reconciles this app, initialize garm and add Gitea endpoint/credentials.
```bash
# 1) Initialize garm (from your local devenv shell)
garm-cli init \
--name homelab \
--url https://garm.lumpiasty.xyz \
--username admin \
--email admin@lumpiasty.xyz \
--password '<STRONG_ADMIN_PASSWORD>' \
--metadata-url http://garm.garm.svc.cluster.local:9997/api/v1/metadata \
--callback-url http://garm.garm.svc.cluster.local:9997/api/v1/callbacks \
--webhook-url http://garm.garm.svc.cluster.local:9997/webhooks
# 2) Add Gitea endpoint
garm-cli gitea endpoint create \
--name local-gitea \
--description 'Cluster Gitea' \
--base-url http://gitea-http.gitea.svc.cluster.local:3000 \
--api-base-url http://gitea-http.gitea.svc.cluster.local:3000/api/v1
# 3) Add Gitea PAT credentials
garm-cli gitea credentials add \
--name gitea-pat \
--description 'PAT for garm' \
--endpoint local-gitea \
--auth-type pat \
--pat-oauth-token '<GITEA_PAT_WITH_write:repository,write:organization>'
```
Then add repositories/orgs and create pools against provider `kubernetes_external`.
If Gitea refuses webhook installation to cluster-local URLs, set `gitea.config.webhook.ALLOWED_HOST_LIST` in `apps/gitea/release.yaml`.

View File

@@ -15,18 +15,6 @@ spec:
spec: spec:
serviceAccountName: garm serviceAccountName: garm
initContainers: initContainers:
- name: install-garm-provider-k8s
image: alpine:3.21
command:
- /bin/sh
- -ec
- |
wget -qO /tmp/garm-provider-k8s.tar.gz "https://github.com/mercedes-benz/garm-provider-k8s/releases/download/v0.3.2/garm-provider-k8s_Linux_x86_64.tar.gz"
tar -xzf /tmp/garm-provider-k8s.tar.gz -C /opt/garm/providers.d
chmod 0755 /opt/garm/providers.d/garm-provider-k8s
volumeMounts:
- name: provider-dir
mountPath: /opt/garm/providers.d
- name: render-garm-config - name: render-garm-config
image: alpine:3.21 image: alpine:3.21
env: env:
@@ -90,7 +78,7 @@ spec:
mountPath: /etc/garm mountPath: /etc/garm
containers: containers:
- name: garm - name: garm
image: ghcr.io/cloudbase/garm:v0.1.7 image: gitea.lumpiasty.xyz/lumpiasty/garm-k8s:r1380
imagePullPolicy: IfNotPresent imagePullPolicy: IfNotPresent
command: command:
- /bin/garm - /bin/garm
@@ -104,8 +92,6 @@ spec:
mountPath: /data mountPath: /data
- name: config-dir - name: config-dir
mountPath: /etc/garm mountPath: /etc/garm
- name: provider-dir
mountPath: /opt/garm/providers.d
- name: provider-config - name: provider-config
mountPath: /etc/garm/provider-config.yaml mountPath: /etc/garm/provider-config.yaml
subPath: provider-config.yaml subPath: provider-config.yaml
@@ -115,8 +101,6 @@ spec:
claimName: garm-lvmhdd claimName: garm-lvmhdd
- name: config-dir - name: config-dir
emptyDir: {} emptyDir: {}
- name: provider-dir
emptyDir: {}
- name: provider-config - name: provider-config
configMap: configMap:
name: garm-provider-k8s-config name: garm-provider-k8s-config

View File

@@ -0,0 +1,5 @@
# renovate: datasource=github-refs depName=cloudbase/garm versioning=git
GARM_COMMIT=818a9dddccba5f2843f185e6a846770988f31fc5
GARM_COMMIT_NUMBER=1380
GARM_IMAGE_REPO=gitea.lumpiasty.xyz/lumpiasty/garm-k8s
GARM_IMAGE=gitea.lumpiasty.xyz/lumpiasty/garm-k8s:r1380

View File

@@ -9,3 +9,4 @@ data:
RENOVATE_ENDPOINT: https://gitea.lumpiasty.xyz/api/v1 RENOVATE_ENDPOINT: https://gitea.lumpiasty.xyz/api/v1
RENOVATE_PLATFORM: gitea RENOVATE_PLATFORM: gitea
RENOVATE_GIT_AUTHOR: Renovate Bot <renovate@lumpiasty.xyz> RENOVATE_GIT_AUTHOR: Renovate Bot <renovate@lumpiasty.xyz>
RENOVATE_ALLOWED_COMMANDS: '["^node utils/update-garm-cli-hash\\.mjs$"]'

View File

@@ -6,6 +6,8 @@ let
hvac hvac
librouteros librouteros
]); ]);
garm-cli = pkgs.callPackage ./nix/garm-cli.nix { };
in in
{ {
# Overlays - apply krew2nix to get kubectl with krew support # Overlays - apply krew2nix to get kubectl with krew support
@@ -41,6 +43,7 @@ in
openbao openbao
pv-migrate pv-migrate
mermaid-cli mermaid-cli
garm-cli
]; ];
# Scripts # Scripts

28
docker/garm/Dockerfile Normal file
View File

@@ -0,0 +1,28 @@
# Multi-stage build: compile garm at a pinned upstream commit, then ship a
# minimal Alpine runtime image that also bundles the external
# garm-provider-k8s binary.
FROM golang:1.25-alpine AS build
# Pinned cloudbase/garm commit to build (supplied at build time; see the
# GARM_COMMIT build-arg wiring in the Makefile / apps/garm/image-source.env).
ARG GARM_COMMIT
# garm-provider-k8s release to bundle (tracked via Renovate custom manager).
ARG GARM_PROVIDER_K8S_VERSION=0.3.2
RUN apk add --no-cache ca-certificates git wget tar build-base
WORKDIR /src
# Clone upstream and check out the exact pinned commit.
RUN git clone https://github.com/cloudbase/garm.git . && git checkout "${GARM_COMMIT}"
# NOTE(review): CGO_ENABLED=1 implies a cgo dependency (build-base provides the
# C toolchain) — confirm against upstream garm build requirements.
RUN CGO_ENABLED=1 GOOS=linux GOARCH=amd64 \
go build -trimpath -ldflags="-s -w" \
-o /out/garm ./cmd/garm
# Fetch the prebuilt provider release archive and mark the binary executable.
RUN mkdir -p /out/providers.d \
&& wget -qO /tmp/garm-provider-k8s.tar.gz "https://github.com/mercedes-benz/garm-provider-k8s/releases/download/v${GARM_PROVIDER_K8S_VERSION}/garm-provider-k8s_Linux_x86_64.tar.gz" \
&& tar -xzf /tmp/garm-provider-k8s.tar.gz -C /out/providers.d \
&& chmod 0755 /out/providers.d/garm-provider-k8s
# Runtime stage: CA certs + tzdata only, running as a fixed-UID non-root user.
FROM alpine:3.21
RUN apk add --no-cache ca-certificates tzdata && adduser -D -u 65532 nonroot
COPY --from=build /out/garm /bin/garm
COPY --from=build /out/providers.d/garm-provider-k8s /opt/garm/providers.d/garm-provider-k8s
USER nonroot
ENTRYPOINT ["/bin/garm"]

40
nix/garm-cli.nix Normal file
View File

@@ -0,0 +1,40 @@
# garm-cli built from a pinned commit on cloudbase/garm main.
# `garmCommit` and `hash` are kept in sync by the Renovate custom manager
# plus the utils/update-garm-cli-hash.mjs post-upgrade task.
{ lib, buildGoModule, fetchFromGitHub, installShellFiles }:
buildGoModule rec {
pname = "garm-cli";
# "r<count>" scheme: commit count on upstream main, matching the image tag
# used by apps/garm (see apps/garm/image-source.env GARM_COMMIT_NUMBER).
version = "r1380";
# Pinned upstream commit; should match GARM_COMMIT in apps/garm/image-source.env.
garmCommit = "818a9dddccba5f2843f185e6a846770988f31fc5";
src = fetchFromGitHub {
owner = "cloudbase";
repo = "garm";
rev = garmCommit;
# SRI hash of the source NAR; rewritten by utils/update-garm-cli-hash.mjs.
hash = "sha256-CTqqabNYUMSrmnQVCWml1/vkDw+OP1uJo1KFhBSZpYY=";
};
subPackages = [ "cmd/garm-cli" ];
nativeBuildInputs = [ installShellFiles ];
# NOTE(review): vendorHash = null assumes upstream vendors its Go deps — confirm.
vendorHash = null;
ldflags = [
"-s"
"-w"
"-X main.version=${version}"
];
# Generate and install shell completions from the built binary.
postInstall = ''
installShellCompletion --cmd garm-cli \
--bash <($out/bin/garm-cli completion bash) \
--fish <($out/bin/garm-cli completion fish) \
--zsh <($out/bin/garm-cli completion zsh)
'';
meta = {
description = "CLI for GitHub Actions Runner Manager";
homepage = "https://github.com/cloudbase/garm";
license = lib.licenses.asl20;
mainProgram = "garm-cli";
};
}

View File

@@ -10,8 +10,57 @@
"gotk-components\\.ya?ml$" "gotk-components\\.ya?ml$"
] ]
}, },
"customManagers": [
{
"customType": "regex",
"description": "Track garm-cli pinned main commit",
"managerFilePatterns": ["^nix/garm-cli\\.nix$"],
"matchStrings": ["garmCommit = \\\"(?<currentValue>[a-f0-9]{40})\\\";"],
"depNameTemplate": "cloudbase/garm",
"datasourceTemplate": "github-refs",
"versioningTemplate": "git"
},
{
"customType": "regex",
"description": "Track garm-provider-k8s release in garm image Dockerfile",
"managerFilePatterns": ["^docker/garm/Dockerfile$"],
"matchStrings": ["ARG GARM_PROVIDER_K8S_VERSION=(?<currentValue>[0-9]+\\.[0-9]+\\.[0-9]+)"],
"depNameTemplate": "mercedes-benz/garm-provider-k8s",
"datasourceTemplate": "github-releases",
"versioningTemplate": "semver"
},
{
"customType": "regex",
"description": "Track pinned garm main commit",
"managerFilePatterns": ["^apps/garm/image-source\\.env$"],
"matchStrings": ["GARM_COMMIT=(?<currentValue>[a-f0-9]{40})"],
"depNameTemplate": "cloudbase/garm",
"datasourceTemplate": "github-refs",
"versioningTemplate": "git"
}
],
"prHourlyLimit": 9, "prHourlyLimit": 9,
"packageRules": [ "packageRules": [
{
"matchManagers": ["custom.regex"],
"matchDepNames": ["cloudbase/garm"],
"matchFileNames": ["nix/garm-cli.nix"],
"postUpgradeTasks": {
"commands": ["node utils/update-garm-cli-hash.mjs"],
"fileFilters": ["nix/garm-cli.nix"],
"executionMode": "update"
}
},
{
"matchManagers": ["custom.regex"],
"matchDepNames": ["cloudbase/garm"],
"matchFileNames": ["apps/garm/image-source.env"],
"postUpgradeTasks": {
"commands": ["node utils/update-garm-image-pin.mjs"],
"fileFilters": ["apps/garm/image-source.env", "apps/garm/deployment.yaml"],
"executionMode": "update"
}
},
{ {
"matchDatasources": ["docker"], "matchDatasources": ["docker"],
"matchPackageNames": ["ghcr.io/mostlygeek/llama-swap"], "matchPackageNames": ["ghcr.io/mostlygeek/llama-swap"],

View File

@@ -0,0 +1,320 @@
import { createHash } from "node:crypto";
import { Buffer } from "node:buffer";
import fs from "node:fs";
import https from "node:https";
import zlib from "node:zlib";
const nixFile = "nix/garm-cli.nix";
// Print the message to stderr and abort the whole run with exit status 1.
function die(message) {
console.error(message);
process.exit(1);
}
// Read a file as UTF-8 text; any read failure aborts the run via die().
function readText(filePath) {
  let contents;
  try {
    contents = fs.readFileSync(filePath, "utf8");
  } catch {
    die(`Missing ${filePath}`);
  }
  return contents;
}
// Pull the `version = "...";` value out of the nix file text; aborts when absent.
function extractVersion(text) {
  const found = /^\s*version\s*=\s*"([^"]+)";/m.exec(text);
  if (found === null) {
    die(`Unable to extract version from ${nixFile}`);
  }
  return found[1];
}
// Pull the pinned 40-hex-char commit out of a `garmCommit = "...";` line,
// or null when no such line is present.
function extractCommit(text) {
  const found = /^\s*garmCommit\s*=\s*"([a-f0-9]{40})";/m.exec(text);
  if (found === null) {
    return null;
  }
  return found[1];
}
// Feed a 64-bit little-endian integer (NAR length field) into the hash.
function writeU64LE(hash, value) {
  const word = Buffer.alloc(8);
  word.writeBigUInt64LE(BigInt(value), 0);
  hash.update(word);
}
// NAR strings are length-prefixed and zero-padded up to an 8-byte boundary.
function writeNarString(hash, data) {
  const padLen = (8 - (data.length % 8)) % 8;
  writeU64LE(hash, data.length);
  hash.update(padLen ? Buffer.concat([data, Buffer.alloc(padLen)]) : data);
}
// Convenience wrapper: encode a JS string as UTF-8 and emit it as a NAR string.
function writeNarText(hash, text) {
  writeNarString(hash, Buffer.from(text, "utf8"));
}
// Decode a NUL-terminated octal tar-header field; empty fields decode to 0.
function parseOctal(field) {
  const text = field.toString("ascii").replace(/\0.*$/, "").trim();
  return text ? Number.parseInt(text, 8) : 0;
}
// Decode the fixed-layout fields of a 512-byte ustar header block.
function parseTarHeader(block) {
  const str = (start, end) =>
    block.subarray(start, end).toString("utf8").replace(/\0.*$/, "");
  const name = str(0, 100);
  const prefix = str(345, 500);
  const typeByte = block[156];
  return {
    // ustar splits long paths into prefix + name.
    name: prefix ? `${prefix}/${name}` : name,
    mode: parseOctal(block.subarray(100, 108)),
    size: parseOctal(block.subarray(124, 136)),
    // A NUL typeflag is treated as '0' (regular file), per the tar spec.
    typeflag: typeByte === 0 ? "0" : String.fromCharCode(typeByte),
    linkname: str(157, 257),
  };
}
// Decode PAX extended-header records: "<len> <key>=<value>\n" repeated.
function parsePax(data) {
  const records = {};
  let pos = 0;
  while (pos < data.length) {
    let spaceAt = pos;
    while (spaceAt < data.length && data[spaceAt] !== 0x20) spaceAt += 1;
    if (spaceAt >= data.length) break;
    const recLen = Number.parseInt(data.subarray(pos, spaceAt).toString("utf8"), 10);
    if (!Number.isFinite(recLen) || recLen <= 0) break;
    const body = data.subarray(spaceAt + 1, pos + recLen).toString("utf8");
    const eqAt = body.indexOf("=");
    if (eqAt > 0) {
      records[body.slice(0, eqAt)] = body.slice(eqAt + 1).replace(/\n$/, "");
    }
    pos += recLen;
  }
  return records;
}
// Unpack a gzipped tar archive into a flat list of entries:
// { path, typeflag, mode, linkname, data }.
// Handles the metadata record types GitHub archive tarballs may contain:
// 'x' (PAX extended header), 'g' (global PAX header, ignored),
// 'L' (GNU long name), 'K' (GNU long link).
function parseTarEntries(archiveBuffer) {
const gz = zlib.gunzipSync(archiveBuffer);
const entries = [];
let i = 0;
// Metadata from a preceding 'x'/'L'/'K' record applies to the next real entry.
let pendingPax = null;
let longName = null;
let longLink = null;
while (i + 512 <= gz.length) {
const header = gz.subarray(i, i + 512);
i += 512;
// An all-zero header block marks the end of the archive.
if (header.every((b) => b === 0)) {
break;
}
const h = parseTarHeader(header);
const data = gz.subarray(i, i + h.size);
// Entry data is padded up to the next 512-byte boundary.
const dataPad = (512 - (h.size % 512)) % 512;
i += h.size + dataPad;
if (h.typeflag === "x") {
pendingPax = parsePax(data);
continue;
}
if (h.typeflag === "g") {
continue;
}
if (h.typeflag === "L") {
longName = data.toString("utf8").replace(/\0.*$/, "");
continue;
}
if (h.typeflag === "K") {
longLink = data.toString("utf8").replace(/\0.*$/, "");
continue;
}
// Precedence: PAX value, then GNU long record, then fixed-width header field.
const path = pendingPax?.path ?? longName ?? h.name;
const linkpath = pendingPax?.linkpath ?? longLink ?? h.linkname;
entries.push({
path,
typeflag: h.typeflag,
mode: h.mode,
linkname: linkpath,
data,
});
pendingPax = null;
longName = null;
longLink = null;
}
return entries;
}
// Drop the archive's single top-level directory (e.g. "garm-<sha>/...") from
// a path; returns "" for the top-level directory entry itself.
function stripTopDir(path) {
  const normalized = path.replace(/^\.?\//, "").replace(/\/$/, "");
  const slash = normalized.indexOf("/");
  return slash === -1 ? "" : normalized.slice(slash + 1);
}
// Walk down from root along relPath, creating directory nodes as needed, and
// return the final directory node. Aborts if a non-directory node is in the way.
function ensureDir(root, relPath) {
  if (!relPath) return root;
  let node = root;
  for (const segment of relPath.split("/").filter(Boolean)) {
    if (!node.children.has(segment)) {
      node.children.set(segment, { kind: "directory", children: new Map() });
    }
    const next = node.children.get(segment);
    if (next.kind !== "directory") {
      die(`Path conflict while building tree at ${relPath}`);
    }
    node = next;
  }
  return node;
}
// Convert flat tar entries into a NAR-style tree of directory/regular/symlink
// nodes, dropping the archive's top-level directory. Entry types other than
// directory ('5'), symlink ('2') and regular file ('0') are ignored.
function buildTree(entries) {
const root = { kind: "directory", children: new Map() };
for (const entry of entries) {
const rel = stripTopDir(entry.path);
// The top-level directory itself corresponds to the root node.
if (!rel) {
continue;
}
const parts = rel.split("/").filter(Boolean);
const name = parts.pop();
const parent = ensureDir(root, parts.join("/"));
if (entry.typeflag === "5") {
const existing = parent.children.get(name);
if (!existing) {
parent.children.set(name, { kind: "directory", children: new Map() });
} else if (existing.kind !== "directory") {
die(`Path conflict at ${rel}`);
}
continue;
}
if (entry.typeflag === "2") {
parent.children.set(name, { kind: "symlink", target: entry.linkname });
continue;
}
if (entry.typeflag === "0") {
parent.children.set(name, {
kind: "regular",
// Any execute bit in the tar mode marks the file executable in the NAR.
executable: (entry.mode & 0o111) !== 0,
contents: Buffer.from(entry.data),
});
continue;
}
}
return root;
}
// Compare two strings by their raw UTF-8 bytes (not JS code units), giving the
// deterministic ordering used for NAR directory entries.
function compareUtf8(a, b) {
  const left = Buffer.from(a, "utf8");
  const right = Buffer.from(b, "utf8");
  return left.compare(right);
}
// Serialize a tree node into the hash using NAR framing: every token is a
// length-prefixed, 8-byte-padded string (see writeNarString). Directory
// entries are emitted sorted by raw UTF-8 bytes so the stream is deterministic.
function narDump(hash, node) {
if (node.kind === "directory") {
writeNarText(hash, "(");
writeNarText(hash, "type");
writeNarText(hash, "directory");
const names = [...node.children.keys()].sort(compareUtf8);
for (const name of names) {
writeNarText(hash, "entry");
writeNarText(hash, "(");
writeNarText(hash, "name");
writeNarString(hash, Buffer.from(name, "utf8"));
writeNarText(hash, "node");
narDump(hash, node.children.get(name));
writeNarText(hash, ")");
}
writeNarText(hash, ")");
return;
}
if (node.kind === "symlink") {
writeNarText(hash, "(");
writeNarText(hash, "type");
writeNarText(hash, "symlink");
writeNarText(hash, "target");
writeNarString(hash, Buffer.from(node.target, "utf8"));
writeNarText(hash, ")");
return;
}
// Regular file: an optional "executable" marker precedes the contents.
writeNarText(hash, "(");
writeNarText(hash, "type");
writeNarText(hash, "regular");
if (node.executable) {
writeNarText(hash, "executable");
writeNarText(hash, "");
}
writeNarText(hash, "contents");
writeNarString(hash, node.contents);
writeNarText(hash, ")");
}
// Fetch a URL into a Buffer over HTTPS, following 3xx redirects.
//
// Fix: the original recursed on every redirect with no bound, so a redirect
// loop would recurse forever. `redirectsLeft` (default 10) bounds the chain;
// the default keeps the call signature backward-compatible.
function fetchBuffer(url, redirectsLeft = 10) {
  return new Promise((resolve, reject) => {
    https
      .get(url, (res) => {
        if (res.statusCode && res.statusCode >= 300 && res.statusCode < 400 && res.headers.location) {
          res.resume(); // drain the redirect body so the socket is released
          if (redirectsLeft <= 0) {
            reject(new Error(`Too many redirects while fetching ${url}`));
            return;
          }
          // Location may be relative; resolve it against the current URL.
          const redirectUrl = new URL(res.headers.location, url).toString();
          fetchBuffer(redirectUrl, redirectsLeft - 1).then(resolve, reject);
          return;
        }
        if (!res.statusCode || res.statusCode < 200 || res.statusCode >= 300) {
          reject(new Error(`Failed to fetch ${url}: ${res.statusCode ?? "unknown"}`));
          res.resume();
          return;
        }
        // Success: accumulate the body chunks into one Buffer.
        const chunks = [];
        res.on("data", (chunk) => chunks.push(chunk));
        res.on("end", () => resolve(Buffer.concat(chunks)));
      })
      .on("error", reject);
  });
}
// Download the GitHub source tarball for `ref`, rebuild its NAR serialization
// in memory, and return the SRI sha256 string — the value expected in the
// `fetchFromGitHub.hash` field of nix/garm-cli.nix.
function computeSRIFromGitHubTar(ref) {
const url = `https://github.com/cloudbase/garm/archive/${ref}.tar.gz`;
return fetchBuffer(url).then((archive) => {
const entries = parseTarEntries(archive);
const root = buildTree(entries);
const hash = createHash("sha256");
// Every NAR stream begins with this magic token.
writeNarText(hash, "nix-archive-1");
narDump(hash, root);
return `sha256-${hash.digest("base64")}`;
});
}
// Replace the existing SRI `hash = "sha256-...";` value with the freshly
// computed one, keeping the surrounding quoting/indentation intact.
function updateHash(text, sri) {
  const pattern = /(^\s*hash\s*=\s*")sha256-[^"]+(";)/m;
  if (!pattern.test(text)) {
    die(`Unable to update hash in ${nixFile}`);
  }
  return text.replace(pattern, `$1${sri}$2`);
}
// Entry point: read nix/garm-cli.nix, pick the GitHub ref (pinned commit when
// present, otherwise the "v<version>" tag), recompute the src NAR hash from
// the GitHub tarball, and write the updated hash back into the nix file.
async function main() {
const text = readText(nixFile);
const version = extractVersion(text);
const commit = extractCommit(text);
// Prefer the explicit commit pin; fall back to a version tag.
const ref = commit ?? `v${version}`;
const sri = await computeSRIFromGitHubTar(ref);
const updated = updateHash(text, sri);
fs.writeFileSync(nixFile, updated, "utf8");
console.log(`Updated ${nixFile} hash to ${sri}`);
}
main().catch((err) => die(err.message));

View File

@@ -0,0 +1,91 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { execFileSync } from "node:child_process";
const pinFile = "apps/garm/image-source.env";
const deploymentFile = "apps/garm/deployment.yaml";
// Print the message to stderr and abort the whole run with exit status 1.
function fail(message) {
console.error(message);
process.exit(1);
}
// Parse simple KEY=VALUE lines into an object. Blank lines, lines starting
// with '#', and lines without '=' are skipped; keys and values are trimmed.
function parseEnvFile(content) {
  const result = {};
  for (const rawLine of content.split(/\r?\n/)) {
    if (!rawLine || rawLine.startsWith("#")) continue;
    const eq = rawLine.indexOf("=");
    if (eq === -1) continue;
    result[rawLine.slice(0, eq).trim()] = rawLine.slice(eq + 1).trim();
  }
  return result;
}
// Replace an existing `KEY=...` line in-place, or append `KEY=value` to the end.
//
// Fixes: (1) the key is now regex-escaped, so a key containing metacharacters
// (e.g. a dot) cannot accidentally match a different line; (2) the replacement
// is built by a callback, so `$`-sequences in the value (like "$&") are written
// literally instead of being expanded by String.replace.
function updateOrAdd(content, key, value) {
  const escapedKey = key.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = new RegExp(`^${escapedKey}=.*$`, "m");
  if (pattern.test(content)) {
    return content.replace(pattern, () => `${key}=${value}`);
  }
  return `${content.trimEnd()}\n${key}=${value}\n`;
}
// Run git with the given arguments and return its trimmed stdout as UTF-8.
function gitOut(args, options = {}) {
  const stdout = execFileSync("git", args, { encoding: "utf8", ...options });
  return stdout.trim();
}
// Run git for its side effects only; execFileSync throws on a non-zero exit.
function gitRun(args, options = {}) {
execFileSync("git", args, options);
}
// --- Script body ---
// Read the pin file and validate the pinned upstream commit.
const pinContent = fs.readFileSync(pinFile, "utf8");
const vars = parseEnvFile(pinContent);
const commit = vars.GARM_COMMIT;
const imageRepo = vars.GARM_IMAGE_REPO || "gitea.lumpiasty.xyz/lumpiasty/garm-k8s";
if (!commit || !/^[0-9a-f]{40}$/.test(commit)) {
fail(`Invalid or missing GARM_COMMIT in ${pinFile}`);
}
// Resolve the commit's position in upstream history; that count becomes the
// "r<count>" image tag. --filter=blob:none keeps the clone small while still
// fetching the full commit graph that rev-list needs.
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "garm-main-"));
let commitNumber;
try {
gitRun(["clone", "--filter=blob:none", "https://github.com/cloudbase/garm.git", tmpDir], {
stdio: "ignore",
});
commitNumber = gitOut(["-C", tmpDir, "rev-list", "--count", commit]);
} finally {
fs.rmSync(tmpDir, { recursive: true, force: true });
}
if (!/^\d+$/.test(commitNumber)) {
fail(`Unable to resolve commit number for ${commit}`);
}
const image = `${imageRepo}:r${commitNumber}`;
// Write the derived values back into the pin file.
let nextPin = pinContent;
nextPin = updateOrAdd(nextPin, "GARM_COMMIT_NUMBER", commitNumber);
nextPin = updateOrAdd(nextPin, "GARM_IMAGE_REPO", imageRepo);
nextPin = updateOrAdd(nextPin, "GARM_IMAGE", image);
fs.writeFileSync(pinFile, nextPin, "utf8");
// Point the deployment at the new tag. The pattern matches either the
// upstream ghcr image or a previously pinned gitea-hosted image reference.
const deployment = fs.readFileSync(deploymentFile, "utf8");
const imagePattern = /image:\s*(?:ghcr\.io\/cloudbase\/garm:[^\s]+|gitea\.lumpiasty\.xyz\/(?:Lumpiasty|lumpiasty)\/garm(?:-k8s)?:[^\s]+)/;
if (!imagePattern.test(deployment)) {
fail(`Unable to update garm image in ${deploymentFile}`);
}
const updatedDeployment = deployment.replace(imagePattern, `image: ${image}`);
fs.writeFileSync(deploymentFile, updatedDeployment, "utf8");
console.log(`Pinned garm image to ${image}`);