Compare commits

..

No commits in common. "a7fad6c9c6bb6b27056fb9921617e998a4986b63" and "faad32582027a1a8b16753cda5967e76b70efc2f" have entirely different histories.

43 changed files with 28 additions and 1267 deletions

View file

@ -1,43 +0,0 @@
# This file is a template, and might need editing before it works on your project.
# This is a sample GitLab CI/CD configuration file that should run without any modifications.
# It demonstrates a basic 3 stage CI/CD pipeline. Instead of real tests or scripts,
# it uses echo commands to simulate the pipeline execution.
#
# A pipeline is composed of independent jobs that run scripts, grouped into stages.
# Stages run in sequential order, but jobs within stages run in parallel.
#
# For more information, see: https://docs.gitlab.com/ee/ci/yaml/index.html#stages
#
# You can copy and paste this template into a new `.gitlab-ci.yml` file.
# You should not add this template to an existing `.gitlab-ci.yml` file by using the `include:` keyword.
#
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/ee/development/cicd/templates.html
# This specific template is located at:
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml
#
#image: docker.io/library/alpine:latest
#stages: # List of stages for jobs, and their order of execution
# - validate
# - test
# - deploy
#
#variables:
# KUBE_CONFIG_PATH: "/root/.kube/config"
# AUTHENTIK_URL: "https://auth.lab.cowley.tech"
#
#validate-authentik: # This job runs in the build stage, which runs first.
# stage: validate
# tags:
# - k3s
# - ccowley
# - lab
# before_script:
# - wget https://github.com/opentofu/opentofu/releases/download/v1.7.2/tofu_1.7.2_amd64.apk -O /tmp/tofu_1.7.2_amd64.apk
# - apk add --allow-untrusted '/tmp/tofu_1.7.2_amd64.apk'
# - mkdir -pv "${HOME}/.kube/"
# - echo "${KUBECONFIG_BASE64}" | base64 -d > ${KUBE_CONFIG_PATH}
# script:
# - cd authentik/
# - tofu init
# - tofu validate

View file

@ -1,114 +0,0 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/backblaze/b2" {
version = "0.8.6"
constraints = "0.8.6"
hashes = [
"h1:FUV3MlKORho03jB7xK4RHLqIoesXEpwDY3Q7j2niEtU=",
"zh:301cb0e9ad3f094e6cb182ffd1496234273d3e9138d03cbf234baf4edabaf0fb",
"zh:3b39c96c0b3081c5d9f372a355527835d26792ffaf6dc06fb390d2c76d09c394",
"zh:736a6d688bb261a3154970f7b487e142e02b02d1e4d877cce763539f4222cc8d",
"zh:ba26881679d2ce35b5f35f75309f5d480060fb29d655fd0e201dbbd55aabd345",
]
}
provider "registry.terraform.io/community-terraform-providers/ignition" {
version = "2.2.2"
constraints = "2.2.2"
hashes = [
"h1:wyXgqs6swQUQ6Dow13ea1nmGRix+lXxPVP3s7qwnrO4=",
"zh:1c40157dfa3b035f4298f5f84dd0f4f9cfc321a19bc674620ee8f80f416ebd8c",
"zh:318435a26c18e32992e40ae91752256043bffb53fbfdc796d33fa03e0ad52784",
"zh:3535ce2f2dce787b37e76a49d1c8554f0fcf1a43041f1343a1314749b11136d0",
"zh:73a4fc04489959630788f6865d52bc58bdc2a4e5de658b9f248b86814316a44b",
"zh:7bf98c13ef07d58a2f1a414560beb2e830d83ad86fa49c2eca563ae140202b29",
"zh:7e4153cca6b2d5d3de002fd28d827fc619a78654d1c42b56b613b17aed379d84",
"zh:91826963f2f901f932115db400b064531898654c6da2d390a2bde51dd52195b8",
"zh:a18aa08db5ddd5c164a4ad9d40f9d13ec6fac3c150a24a92435fcfd434c0b677",
"zh:c1f19e64e0cffcdb596f4aaabb04769793c5bf5a61997712bcf1b1f9a38278b4",
"zh:cd515c5ee78828ab5a7021ff182c1579dd5952b19e3c4735ba5c519d58c2d05b",
"zh:e4dbc417549e83ea5a83c9562eb2839b2983f6305d293d241cc55e37b71d3ad2",
"zh:ea87b664efe68acaf1998c9859fc893040eac714868a2d00dd1feee44ac3ab6f",
]
}
provider "registry.terraform.io/ivoronin/macaddress" {
version = "0.3.2"
constraints = "0.3.2"
hashes = [
"h1:jJ/LOHNgy5hDoVfE+Si4YoRQ0jpCGAAZFMF21lGE0nw=",
"zh:00cb168d9210ed88cfa7de8a33d5666b2cf6660a5d20a7a96348b8b902833eca",
"zh:1366458320df0b6f1132e59b5410931c0c5626bbf27b05b29dd311311a710e9b",
"zh:2e8102c7f6046665c95b806752d692843f2e846554f7eba85690cd2087c9048a",
"zh:3c1ae52f855d0e694ad28eb34ec41c553344aaa7bd51adaa48cf15e3ee842e17",
"zh:496d8db2055cead9d264fdad83534318e3ab77ce06e38d43674a4ec25c0e860d",
"zh:54c5eeae7cc61d706080256e06aaf509869b1d86297b9e99948a2fe2af6d455b",
"zh:5f26e851048be3c56f3706b7fde25fe76dd30003ef6356216dc9ecff400218bb",
"zh:5fc1debcd0fe043dfce00ab110e180b896a1a9958edea7d81d05aacc9b630e5e",
"zh:650045261b382b4559fd1bd190d6cabbeb022b53d7e240eb6b66f6824ca81bf4",
"zh:7203dea017883e8fdd7ba66c9b1a9aac0cab101133e4eeab365c4d0995194272",
"zh:726a9222d15f11316587c199ee367bae1d5495ff16ebdfc41635f7628834a8d6",
"zh:c9f3bcaa073a0921189bd74ef6b2b57cad34b3eb01788c010df8a15fd9d8045c",
"zh:d3fba491b0ff0d3d64162216159232398a75ad81c31e4304335d6b76b74a864a",
"zh:e80011c6e3af4eeafdeda9bd118a774f8b7cdf1f133953abf827f313653ec184",
]
}
provider "registry.terraform.io/poseidon/matchbox" {
version = "0.5.2"
constraints = "0.5.2"
hashes = [
"h1:ZlO7fr0TYs0Gyfl5xx3fcVSlzcaorr1QWOYbTebSqO8=",
"zh:2f51a49a5418cf22dc7201c22d0bd1ff7fc095eb97688f040491492e319fe076",
"zh:44db04a6867a1116a7a41919d94abf9d2725bc87130d8ee5f9055457639d1e94",
"zh:4e59679bcde22a111b45371e86941562ccdd7db3895762eb2f512419f9d6653d",
"zh:cafdded7d8ddcfb3a6ddb249590d23377c177ecd08005ce4e2a0f4c94ee77de8",
"zh:cf5d468d9320db7dfae03707b6003372c59070e58725cd50b3d3a7b20d1037ee",
]
}
provider "registry.terraform.io/siderolabs/talos" {
version = "0.4.0-alpha.0"
constraints = "0.4.0-alpha.0"
hashes = [
"h1:04PT2Q9ubBLFCXqMEahR9M3mPYjDfe9Tn05QaUNi6qs=",
"zh:0f7561370fe15a33b7ded55fe27a21f6e45b2a4950502edfcdb583fd91771239",
"zh:0fa82a384b25a58b65523e0ea4768fa1212b1f5cfc0c9379d31162454fedcc9d",
"zh:1bdbf5927dd4e810f14e159fe0e5f4873b7144a58e5aee5d95e2e33b03280152",
"zh:230ab04c060a894e028dbf4979bc8f3b98b55e699965852541d028553d6f2d07",
"zh:34e7c87e4d5779141774b701bbbb6a25bf9e216d776e856bf45c6e81a46f04ad",
"zh:3c1415948a7a183bbe3e044396188271e0802d99f73dff5ef0ff30cc1ad1ac99",
"zh:505247134ad86ebcf5064e4dc7a70d3b9dfd64209cadb73483008683c5babd19",
"zh:8ab95ee386570be2cf2e8adcdbb0af2f1b1b47c2952dbe01ce93ba131fd8fc4d",
"zh:9755730e5d899329f19b54e25417eca230471ff505f9e8cb3a7d02dd6450781b",
"zh:9758565b452853228ecdfe8b760a07f639384160b5e48eee5f623d1b59a3c67a",
"zh:9d43efae13e5976607c0df656cc7023306f5881e71347483a896f8c36ed80258",
"zh:9e68dd968501b00de06e91cccae7dae9cf05eeafd04baae36c51949fd70b189b",
"zh:a6d02e21f936bc015f4c861745e1fadac9e691565c42afa552e4022eece87c09",
"zh:bf36b8f95a6cf81328a8a8d30b73441d551bef0da7b10fda34301e7e3dfa201b",
"zh:ed0383567469e1887df2e3003366b54841d8ffaa27675d132a04d970dcb1d71c",
]
}
provider "registry.terraform.io/tailscale/tailscale" {
version = "0.13.11"
constraints = "0.13.11"
hashes = [
"h1:4trmz0fx3JthZewl82y3UzzMzGaTgpjdP7+MNsq5H0k=",
"zh:0ad8afd43061faadd0f72c03bad81d900fd43ed0051318e6312e2a29f34064e0",
"zh:19e74391245935ba0d37f03db66d913194e99233118df95323555277defb6aaf",
"zh:35e956483901dcb97672c3200d7326f0913b3ce981d33ed89ee2fe3622a4347c",
"zh:451f632e278a62c471099103261092df4d01db11f82f48001ea5be33e1323a4f",
"zh:4d2dfc24470d99631fa3e73a92393d75486014a02c834201cc937d267b2e7c92",
"zh:6dbdb227ed370bf75c26c596ec4b3ec8664e113fedd4bd353771bde1b4a2b228",
"zh:83691de803a8877a95468b3be9f6d3f9bc6e99f6a8b3ff409dc03c5d1ec12990",
"zh:8f998e38e8d192bddc46e9f787ab6411fd82c3fa2c3336a9b2a33f07e6065f2d",
"zh:971290bc1429f86c4f53cd6567d80aa2f1c941f4bdbf90b050df61db6040ca05",
"zh:a41d8886b41fe8172d100716fd7f69e1452ef2063ddbde239ac1ec5f6a928cc6",
"zh:a43bb131e2504d06d090cd4b75df5d1169e9cb263e4a0dde22d1f4db8399a37d",
"zh:ed9de55f0536c0eddb2b1f0d5e9581a16e69837146bf3d7e328e6aa8c5ce9e2e",
"zh:f0c4b998248d24de3fc2cf05b49f484fc500d1b3821583637795bb55e421b0e7",
"zh:fe28c8b2aa63904e9a1e7e228fb8a0cfc869a828bb9244f9f742ab67c7f5484d",
]
}

View file

@ -1,11 +0,0 @@
# Terraform workflow helpers. Recipe lines MUST be TAB-indented (make
# rejects space-indented recipes).
init:
	terraform init
plan:
	terraform plan -out tfplan
apply: plan
	terraform apply tfplan
# -refresh-only cannot be combined with a saved plan file, so this target
# runs a plain refresh-only apply instead of consuming tfplan.
refresh:
	terraform apply -refresh-only

View file

@ -1,6 +0,0 @@
# Zincati auto-update strategy: only reboot for OS updates inside a weekend
# maintenance window (Sat/Sun starting 22:30, 60 minutes long).
# NOTE(review): Zincati presumably interprets start_time as UTC — confirm.
[updates]
strategy = "periodic"
[[updates.periodic.window]]
days = [ "Sat", "Sun" ]
start_time = "22:30"
length_minutes = 60

View file

@ -1,11 +0,0 @@
#!/usr/bin/env sh
# Install k3s in agent mode and join it to the cluster master.
# ${k3s_token} is a Terraform templatefile() placeholder substituted when
# this script is rendered into the Ignition config.
main() {
export K3S_URL="https://kubemaster0.lab.cowley.tech:6443"
export K3S_TOKEN="${k3s_token}"
export INSTALL_K3S_EXEC="agent"
# Official k3s installer; the env vars above select agent mode + join target.
curl -sfL https://get.k3s.io | sh -
return 0
}
main

View file

@ -1,3 +0,0 @@
#!/bin/sh
# Prune unused container images from this node's k3s containerd store.
/usr/local/bin/k3s crictl rmi --prune

View file

@ -1,9 +0,0 @@
#!/usr/bin/env sh
# Install k3s (server mode by default) with a world-readable kubeconfig.
main() {
export K3S_KUBECONFIG_MODE="644"
#export INSTALL_K3S_EXEC=" --disable servicelb --disable traefik"
curl -sfL https://get.k3s.io | sh -
return 0
}
main

View file

@ -1,3 +0,0 @@
# k3s node config for NUC workers: label them as fast nodes for scheduling.
node-label:
- performance=high

View file

@ -1,9 +0,0 @@
#!/bin/sh
# One-time prerequisite: layer the k3s SELinux policy onto Fedora CoreOS
# via rpm-ostree, then drop a stamp file so the systemd unit's
# ConditionPathExists skips this on later boots.
main() {
#rpm-ostree install https://rpm.rancher.io/k3s-selinux-0.1.1-rc1.el7.noarch.rpm
rpm-ostree install --allow-inactive --apply-live https://github.com/k3s-io/k3s-selinux/releases/download/v1.2.stable.2/k3s-selinux-1.2-2.el8.noarch.rpm
touch /var/.k3s-selinux-installed
return 0
}
main

View file

@ -1,9 +0,0 @@
#!/usr/bin/env sh
# Install k3s in server mode with servicelb disabled; ${k3s_token} is a
# Terraform templatefile() placeholder substituted at render time.
main() {
export K3S_TOKEN="${k3s_token}"
curl -sfL https://get.k3s.io | K3S_KUBECONFIG_MODE="644" INSTALL_K3S_EXEC="server" sh -s - --disable=servicelb
return 0
}
main

View file

@ -1,5 +0,0 @@
# k3s node config for slow workers: label them and taint so only workloads
# tolerating performance=low are scheduled (NoExecute evicts others).
node-label:
- performance=slow
node-taint:
- performance=low:NoExecute

View file

@ -1,16 +0,0 @@
# MINIO_ROOT_USER and MINIO_ROOT_PASSWORD sets the root account for the MinIO server.
# This user has unrestricted permissions to perform S3 and administrative API operations on any resource in the deployment.
# Omit to use the default values 'minioadmin:minioadmin'.
# MinIO recommends setting non-default values as a best practice, regardless of environment
MINIO_ROOT_USER=myminioadmin
# NOTE(review): "${minio-secret-key-change-me}" is not a valid Terraform
# template variable (hyphens are not allowed in identifiers), and in shell
# it parses as a ${parameter-default} fallback — confirm how this file is
# rendered and replace with a real secret reference.
MINIO_ROOT_PASSWORD="${minio-secret-key-change-me}"
# MINIO_VOLUMES sets the storage volume or path to use for the MinIO server.
MINIO_VOLUMES="/mnt/data"
# MINIO_OPTS sets any additional commandline options to pass to the MinIO server.
# For example, `--console-address :9001` sets the MinIO Console listen port
MINIO_OPTS="--console-address :9001"
# Allow unauthenticated scraping of MinIO's Prometheus metrics endpoints.
MINIO_PROMETHEUS_AUTH_TYPE="public"

View file

@ -1,199 +0,0 @@
# Fedora CoreOS PXE assets served by matchbox. kernel/initrd are
# matchbox-relative asset paths; rootfs must be a full URL because it is
# fetched by the booted initramfs via the coreos.live.rootfs_url kernel arg.
locals {
kernel = "/assets/fedora-coreos/fedora-coreos-${var.os_version}-live-kernel-x86_64"
initrd = "/assets/fedora-coreos/fedora-coreos-${var.os_version}-live-initramfs.x86_64.img"
rootfs = "${var.matchbox_http_endpoint}/assets/fedora-coreos/fedora-coreos-${var.os_version}-live-rootfs.x86_64.img"
}
# module "virtnode" {
# source = "../terraform-modules/coreos-agent"
#
# name = "virtnode"
# pool = "default"
# base_image = "http://matchbox.lab.cowley.tech:8080/assets/fedora-coreos/fedora-coreos-39.20231101.3.0-qemu.x86_64.qcow2"
# memory = 2048
# vcpu = 1
# disk_size = 30
#
# bridge = "br6"
# ignition = data.ignition_config.worker.rendered
# }
# PXE profile for generic (VM) workers: boot the FCOS live kernel/initramfs
# and install to /dev/vda, fetching per-machine Ignition back from matchbox.
resource "matchbox_profile" "worker" {
name = "worker"
kernel = local.kernel
initrd = [
"--name main ${local.initrd}"
]
args = [
"ip=dhcp",
"initrd=main",
"coreos.live.rootfs_url=${local.rootfs}",
"coreos.inst.install_dev=/dev/vda",
# $${...} renders a literal ${...} for matchbox itself to substitute.
"coreos.inst.ignition_url=${var.matchbox_http_endpoint}/ignition?uuid=$${uuid}&mac=$${mac:hexhyp}",
"console=tty0",
"console=ttyS0",
]
raw_ignition = data.ignition_config.worker.rendered
}
# Same boot flow for bare-metal NUCs, which install to /dev/sda instead.
resource "matchbox_profile" "nuc" {
name = "nuc"
kernel = local.kernel
initrd = [
"--name main ${local.initrd}"
]
args = [
"ip=dhcp",
"initrd=main",
"coreos.live.rootfs_url=${local.rootfs}",
"coreos.inst.install_dev=/dev/sda",
"coreos.inst.ignition_url=${var.matchbox_http_endpoint}/ignition?uuid=$${uuid}&mac=$${mac:hexhyp}",
"console=tty0",
"console=ttyS0",
]
raw_ignition = data.ignition_config.nuc.rendered
}
# One matchbox group per NUC, matched by MAC address (var.nucs: name => MAC).
resource "matchbox_group" "nuc" {
for_each = var.nucs
name = each.key
profile = matchbox_profile.nuc.name
selector = {
# mac = "c0:3f:d5:63:7b:3c"
mac = each.value
}
}
# Slow (VM) workers reuse the generic worker profile.
resource "matchbox_group" "slowworker" {
for_each = var.slowworkers
name = each.key
profile = matchbox_profile.worker.name
selector = {
mac = each.value
}
}
# Single statically-addressed worker kept outside the for_each maps.
resource "matchbox_group" "worker" {
name = "worker"
profile = matchbox_profile.worker.name
selector = {
mac = "52:54:00:c3:51:e8"
}
}
# Default "core" user for every node, with two personal keys plus the
# per-environment key from var.ssh_authorized_key.
data "ignition_user" "core" {
name = "core"
ssh_authorized_keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPTIgRUxUcj4K6E9xwwuxWyRaR4tkf57cgWkk5eWTnck ccowley@pinebook",
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIK3jy6l2FSrkUoOVLGP4Spvz+X1sQbt5zY/CpWTRpzRi ccowley@lat5320ccowley.ubisoft.org",
var.ssh_authorized_key
]
}
# Ignition payload for generic (VM) workers: core user, QEMU guest agent,
# k3s agent install, and daily image-cleanup units/files.
data "ignition_config" "worker" {
users = [
data.ignition_user.core.rendered
]
systemd = [
data.ignition_systemd_unit.qemu_ga.rendered,
data.ignition_systemd_unit.k3s_install.rendered,
data.ignition_systemd_unit.worker-images-clean-service.rendered,
data.ignition_systemd_unit.worker-images-clean-timer.rendered
]
files = [
data.ignition_file.worker-options.rendered,
data.ignition_file.k3s_agent_install_script.rendered,
data.ignition_file.worker-images-clean.rendered
]
}
# Ignition payload for bare-metal NUCs: KVM tooling instead of the guest
# agent, plus NUC-specific k3s options.
data "ignition_config" "nuc" {
users = [
data.ignition_user.core.rendered
]
systemd = [
data.ignition_systemd_unit.kvm_install.rendered,
data.ignition_systemd_unit.k3s_install.rendered,
data.ignition_systemd_unit.worker-images-clean-service.rendered,
data.ignition_systemd_unit.worker-images-clean-timer.rendered
]
files = [
data.ignition_file.nuc-worker-options.rendered,
data.ignition_file.k3s_agent_install_script.rendered,
data.ignition_file.worker-images-clean.rendered
]
}
# Unit that runs the QEMU guest agent container on VM workers.
data "ignition_systemd_unit" "qemu_ga" {
name = "qemu-ga-install.service"
content = file("${path.module}/units/qemu-ga.service")
}
# Agent install script rendered with the cluster join token baked in,
# placed on the node as an owner-executable file.
data "ignition_file" "k3s_agent_install_script" {
  path = "/opt/k3s-agent-install.sh"
  # The ignition provider's mode attribute is decimal: 448 == 0o700
  # (rwx------). The previous value, decimal 700, is 0o1274 and sets
  # unintended permission bits.
  mode = 448
  content {
    content = templatefile(
      "${path.module}/files/k3s-agent-install.sh",
      {
        "k3s_token" = var.k3s_agent_token
      }
    )
  }
}
# k3s agent install unit; depends_on forces the install script file to be
# rendered first.
data "ignition_systemd_unit" "k3s_install" {
name = "k3s-install.service"
depends_on = [
data.ignition_file.k3s_agent_install_script
]
# NOTE(review): file(), not templatefile() — any ${...} placeholders inside
# this unit are NOT substituted; confirm the unit relies on the script for
# the token.
content = file("${path.module}/units/k3s-install.service")
}
# Unit layering KVM/QEMU packages onto NUC hosts.
data "ignition_systemd_unit" "kvm_install" {
name = "kvm-install.service"
content = file("${path.module}/units/kvm-install.service")
}
# k3s config.yaml for the NUC profile.
data "ignition_file" "nuc-worker-options" {
path = "/etc/rancher/k3s/config.yaml"
content {
content = file("${path.module}/files/k3s-nuc-worker-config.yaml")
}
}
# k3s config.yaml for the generic worker profile.
data "ignition_file" "worker-options" {
path = "/etc/rancher/k3s/config.yaml"
content {
content = file("${path.module}/files/k3s-worker-config.yaml")
}
}
# Image-cleanup helper placed on the node; invoked by clean-images.service.
data "ignition_file" "worker-images-clean" {
  path = "/opt/k3s-image-clean.sh"
  # The ignition provider's mode attribute is decimal: 448 == 0o700
  # (rwx------). The previous value, decimal 700, is 0o1274 and sets
  # unintended permission bits.
  mode = 448
  content {
    content = file("${path.module}/files/k3s-clean-images.sh")
  }
}
# Cleanup service unit; depends_on ensures the script file is rendered first.
data "ignition_systemd_unit" "worker-images-clean-service" {
name = "clean-images.service"
depends_on = [
data.ignition_file.worker-images-clean
]
content = file("${path.module}/units/clean-images.service")
}
# Daily timer that triggers the cleanup service.
data "ignition_systemd_unit" "worker-images-clean-timer" {
name = "clean-images.timer"
depends_on = [
data.ignition_file.worker-images-clean,
data.ignition_systemd_unit.worker-images-clean-service,
]
content = file("${path.module}/units/clean-images.timer")
}

View file

@ -1,75 +0,0 @@
# The single k3s control-plane node, matched by its MAC address.
resource "matchbox_group" "server" {
name = "server"
profile = matchbox_profile.server.name
selector = {
#mac = "52:54:00:c5:88:ad"
mac = "c0:3f:d5:6a:02:a3"
}
}
# PXE profile for the server: boot the FCOS live environment, install to
# /dev/sda, and fetch per-machine Ignition back from matchbox.
resource "matchbox_profile" "server" {
name = "server"
kernel = local.kernel
initrd = [
"--name main ${local.initrd}"
]
args = [
"ip=dhcp",
"initrd=main",
"coreos.live.rootfs_url=${local.rootfs}",
"coreos.inst.install_dev=/dev/sda",
# $${...} renders a literal ${...} for matchbox itself to substitute.
"coreos.inst.ignition_url=${var.matchbox_http_endpoint}/ignition?uuid=$${uuid}&mac=$${mac:hexhyp}",
"console=tty0",
"console=ttyS0",
]
raw_ignition = data.ignition_config.server.rendered
}
# Ignition payload for the server: core user, k3s server install unit and
# script, static hostname, and Zincati update-window config.
data "ignition_config" "server" {
users = [
data.ignition_user.core.rendered
]
systemd = [
#data.ignition_systemd_unit.qemu_ga.rendered,
data.ignition_systemd_unit.k3s_server_install.rendered,
]
files = [
# data.ignition_file.worker-options.rendered,
data.ignition_file.k3s_server_install_script.rendered,
data.ignition_file.k3s_server_hostname.rendered,
data.ignition_file.k3s_server_updates.rendered,
]
}
# Zincati config restricting auto-update reboots to the weekend window.
data "ignition_file" "k3s_server_updates" {
path = "/etc/zincati/config.d/55-updates-strategy.toml"
content {
content = file("${path.module}/files/55-updates-strategy.toml")
}
}
# Server install script rendered with the cluster token baked in, placed
# on the node as an owner-executable file.
data "ignition_file" "k3s_server_install_script" {
  path = "/opt/k3s-server-install.sh"
  # The ignition provider's mode attribute is decimal: 448 == 0o700
  # (rwx------). The previous value, decimal 700, is 0o1274 and sets
  # unintended permission bits.
  mode = 448
  content {
    content = templatefile(
      "${path.module}/files/k3s-server-install.sh",
      {
        "k3s_token" = var.k3s_agent_token
      }
    )
  }
}
# Server install unit. NOTE(review): loaded with file(), not templatefile(),
# so any ${k3s_token} placeholder inside the unit is NOT substituted — the
# install script above sets the token itself; confirm the unit does not
# depend on it.
data "ignition_systemd_unit" "k3s_server_install" {
name = "k3s-install.service"
depends_on = [
data.ignition_file.k3s_server_install_script
]
content = file("${path.module}/units/k3s-server-install.service")
}
# Static hostname for the k3s server node.
data "ignition_file" "k3s_server_hostname" {
  path = "/etc/hostname"
  # The ignition provider's mode attribute is decimal: 420 == 0o644
  # (rw-r--r--). HCL does not treat a leading zero as octal, so the previous
  # 0644 was applied as decimal 644 (0o1204).
  mode = 420
  content {
    content = "kubemaster0.lab.cowley.tech"
  }
}

View file

@ -1,47 +0,0 @@
# Provider requirements for the lab PXE stack, all pinned to exact versions.
terraform {
required_version = ">= 1.0"
required_providers {
tailscale = {
source = "tailscale/tailscale"
version = "0.13.11"
}
ignition = {
source = "community-terraform-providers/ignition"
version = "2.2.2"
}
macaddress = {
source = "ivoronin/macaddress"
version = "0.3.2"
}
matchbox = {
source = "poseidon/matchbox"
version = "0.5.2"
}
talos = {
source = "siderolabs/talos"
version = "0.4.0-alpha.0"
}
b2 = {
source = "Backblaze/b2"
version = "0.8.6"
}
}
}
# Empty provider blocks: credentials are presumably supplied via environment
# variables — confirm.
provider "b2" {
}
# matchbox gRPC API secured with mutual TLS; client credentials are read
# from ~/.matchbox on the machine running terraform.
provider "matchbox" {
endpoint = var.matchbox_rpc_endpoint
client_cert = file("~/.matchbox/client.crt")
client_key = file("~/.matchbox/client.key")
ca = file("~/.matchbox/ca.crt")
}
provider "talos" {
}
provider "tailscale" {
}

View file

@ -1,45 +0,0 @@
# PXE profile for Talos control-plane nodes: boot the Talos 1.6.2
# kernel/initramfs with kernel hardening flags (init_on_alloc, slab_nomerge,
# pti=on); the machine config is fetched from the matchbox assets directory.
resource "matchbox_profile" "talos-controlplane" {
name = "talos-controlplane"
kernel = "http://matchbox.lab.cowley.tech:8080/assets/talos/1.6.2/vmlinuz-amd64"
initrd = [
"http://matchbox.lab.cowley.tech:8080/assets/talos/1.6.2/initramfs-amd64.xz"
]
args = [
"initrd=initramfs.xz",
"init_on_alloc=1",
"slab_nomerge",
"pti=on",
"console=tty0",
"console=ttyS0",
"printk.devkmsg=on",
"talos.platform=metal",
"talos.config=http://matchbox.lab.cowley.tech:8080/assets/talos/controlplane.yaml"
]
}
# Same boot arguments for workers, pointed at the worker machine config.
resource "matchbox_profile" "talos-worker" {
name = "talos-worker"
kernel = "http://matchbox.lab.cowley.tech:8080/assets/talos/1.6.2/vmlinuz-amd64"
initrd = [
"http://matchbox.lab.cowley.tech:8080/assets/talos/1.6.2/initramfs-amd64.xz"
]
args = [
"initrd=initramfs.xz",
"init_on_alloc=1",
"slab_nomerge",
"pti=on",
"console=tty0",
"console=ttyS0",
"printk.devkmsg=on",
"talos.platform=metal",
"talos.config=http://matchbox.lab.cowley.tech:8080/assets/talos/worker.yaml"
]
}
# Control-plane machine selected by MAC address.
resource "matchbox_group" "controlplane" {
name = "controlplane"
profile = matchbox_profile.talos-controlplane.name
selector = {
mac = "52:54:00:25:b2:8e"
}
}

View file

@ -1,16 +0,0 @@
# Lab environment values for the matchbox/FCOS PXE stack.
# Matchbox endpoints in raw-IP form; DNS alternatives kept commented.
matchbox_http_endpoint = "http://192.168.6.3:8080"
matchbox_rpc_endpoint = "192.168.6.3:8081"
#matchbox_http_endpoint = "http://matchbox.lab.cowley.tech:8080"
#matchbox_rpc_endpoint = "matchbox.lab.cowley.tech:8081"
#os_version = "38.20231027.3.2"
os_version = "39.20240104.3.0"
os_stream = "stable"
ssh_authorized_key = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIMEpF8xS30j9n1a+0FegDYkDis8yHy2ZCj3gvWXmeu5z ccowley@pxe.lab.cowley.tech"
# Node name => MAC address maps consumed by the matchbox_group for_each.
nucs = {
"agent0" = "c0:3f:d5:63:7b:3c"
"agent1" = "b8:ae:ed:7f:bc:45"
"agent2" = "c0:3f:d5:66:ba:79"
}
slowworkers = {
"slownode2" = "52:54:00:c0:39:0f"
}

View file

@ -1,7 +0,0 @@
# Oneshot unit run by clean-images.timer to prune unused container images.
[Unit]
Description=Clean k3s images
[Service]
Type=oneshot
# NOTE(review): presumably set for crictl's config lookup — confirm.
Environment="HOME=/root/"
ExecStart=/opt/k3s-image-clean.sh

View file

@ -1,11 +0,0 @@
# Timer driving clean-images.service once a day, with a randomized delay so
# all nodes do not prune at the same moment.
[Unit]
Description=Run daily image cleanup
[Timer]
OnCalendar=daily
AccuracySec=1h
# Run on next boot if a scheduled activation was missed while powered off.
Persistent=true
RandomizedDelaySec=6000
[Install]
WantedBy=timers.target

View file

@ -1,18 +0,0 @@
# Run the k3s agent install script once per host: only when the script and
# the SELinux stamp file exist and the k3s binary does not yet.
[Unit]
Description=Run K3s script
Wants = network-online.target
After = network-online.target
ConditionPathExists=/opt/k3s-agent-install.sh
ConditionPathExists=/var/.k3s-selinux-installed
ConditionPathExists=!/opt/bin/k3s
[Service]
# NOTE(review): the install script runs in the foreground; Type=forking with
# RemainAfterExit=no looks questionable — the sibling agent unit uses
# Type=oneshot. Confirm which variant is intended.
Type=forking
TimeoutStartSec=0
RemainAfterExit=no
KillMode=process
# NOTE(review): ${k3s_token} is only substituted if this unit is rendered
# with templatefile(); the terraform data source loads it with file() —
# verify this line is actually effective.
Environment="K3S_TOKEN=${k3s_token}"
ExecStart=/usr/bin/sh -c "/opt/k3s-agent-install.sh"
[Install]
WantedBy=multi-user.target

View file

@ -1,19 +0,0 @@
# Oneshot variant of the agent install unit, pointing the agent at the
# cluster server via K3S_URL.
[Unit]
Description=Run K3s agent script
Wants=network-online.target
After=network-online.target
ConditionPathExists=/opt/k3s-agent-install.sh
ConditionPathExists=/var/.k3s-selinux-installed
ConditionPathExists=!/opt/bin/k3s
[Service]
Type=oneshot
TimeoutStartSec=0
RemainAfterExit=no
KillMode=process
# NOTE(review): ${k3s_token} is only substituted if this unit is rendered
# with templatefile(); sibling units are loaded with file() — verify.
Environment="K3S_TOKEN=${k3s_token}"
Environment="K3S_URL=https://kubemaster0.lab.cowley.tech:6443"
ExecStart=/usr/bin/sh -c "/opt/k3s-agent-install.sh"
[Install]
WantedBy=multi-user.target

View file

@ -1,19 +0,0 @@
# Edge-cluster agent install unit; requires the k3s config.yaml and install
# script to exist, and skips once the k3s binary is present.
[Unit]
Description=Run K3s script
Wants = network-online.target
After = network-online.target
ConditionPathExists=/etc/rancher/k3s/config.yaml
ConditionPathExists=/opt/k3s-agent-install.sh
ConditionPathExists=!/opt/bin/k3s
[Service]
Type=forking
TimeoutStartSec=0
RemainAfterExit=yes
KillMode=process
# NOTE(review): join token hard-coded in plain text — rotate it and inject
# via templating like the other units.
Environment="K3S_TOKEN=secret_edgecluster_token"
Environment="INSTALL_K3S_EXEC=agent"
ExecStart=/usr/bin/sh -c "/opt/k3s-agent-install.sh"
[Install]
WantedBy=multi-user.target

View file

@ -1,17 +0,0 @@
# Run k3s prerequisites (SELinux policy layering) once, before the k3s
# install unit; the stamp file /var/.k3s-selinux-installed gates re-runs.
[Unit]
Description=Run K3s prereqs
Wants = network-online.target
After = network-online.target
ConditionPathExists=/opt/k3s-prereqs.sh
ConditionPathExists=!/var/.k3s-selinux-installed
ConditionPathExists=!/opt/bin/k3s
[Service]
# NOTE(review): the script runs in the foreground; Type=forking may mark the
# unit failed — confirm against the Type=oneshot variants in this repo.
Type=forking
TimeoutStartSec=0
RemainAfterExit=yes
KillMode=process
ExecStart=/usr/bin/sh -c "/opt/k3s-prereqs.sh"
[Install]
WantedBy=multi-user.target

View file

@ -1,17 +0,0 @@
# Install the k3s server once; skipped when /usr/local/bin/k3s exists.
[Unit]
Description=Run K3s script
Wants = network-online.target
After = network-online.target
ConditionPathExists=/opt/k3s-server-install.sh
ConditionPathExists=!/usr/local/bin/k3s
[Service]
Type=forking
TimeoutStartSec=0
RemainAfterExit=no
KillMode=process
# NOTE(review): this unit is loaded with file(), not templatefile(), so
# ${k3s_token} is NOT substituted here; the install script exports
# K3S_TOKEN itself — confirm and consider dropping this line.
Environment="K3S_TOKEN=${k3s_token}"
ExecStart=/usr/bin/sh -c "/opt/k3s-server-install.sh"
[Install]
WantedBy=multi-user.target

View file

@ -1,23 +0,0 @@
# Layer QEMU system emulation onto Fedora CoreOS once per host; %N expands
# to the unit name, giving a per-unit stamp file that gates re-runs.
[Unit]
Description=Install KVM
Wants = network-online.target
After = network-online.target
# We run before `zincati.service` to avoid conflicting rpm-ostree
# transactions.
Before=zincati.service
ConditionPathExists=!/var/lib/%N.stamp
[Service]
Type=oneshot
RemainAfterExit=yes
# `--allow-inactive` ensures that rpm-ostree does not return an error
# if the package is already installed. This is useful if the package is
# added to the root image in a future Fedora CoreOS release as it will
# prevent the service from failing.
ExecStart=/usr/bin/rpm-ostree install --apply-live --allow-inactive qemu-system-x86-core
ExecStart=/bin/touch /var/lib/%N.stamp
[Install]
WantedBy=multi-user.target

View file

@ -1,25 +0,0 @@
# Run MinIO as a podman container pinned to a specific release tag.
[Unit]
Description=Run Minio Container
Wants = network-online.target
After = network-online.target
[Service]
TimeoutStartSec=0
# Best-effort cleanup of any stale container ("-" prefix ignores failures).
ExecStartPre=-/bin/podman kill minio
ExecStartPre=-/bin/podman rm minio
ExecStartPre=/bin/podman pull quay.io/minio/minio:RELEASE.2024-03-07T00-43-48Z-cpuv1
# Host /var/lib/minio backs the data dir; /etc/default/minio supplies the
# env-style config via MINIO_CONFIG_ENV_FILE.
ExecStart=/bin/podman run \
-t \
--rm \
-p 9000:9000 -p 9001:9001 \
-v /var/lib/minio:/mnt/data/:Z \
-v /etc/default/minio:/etc/config.env:Z \
-e "MINIO_CONFIG_ENV_FILE=/etc/config.env" \
--name "minio" \
quay.io/minio/minio:RELEASE.2024-03-07T00-43-48Z-cpuv1 server --console-address ":9001"
ExecStop=/bin/podman stop minio
[Install]
WantedBy=multi-user.target

View file

@ -1,22 +0,0 @@
# Run the Prometheus node exporter in podman with host PID/network access
# and the whole root filesystem mounted read-only for metrics collection.
[Unit]
Description=Run Prometheus Node Exporter
Wants = network-online.target
After = network-online.target
[Service]
TimeoutStartSec=0
# Best-effort cleanup of a stale container ("-" prefix ignores failures).
ExecStartPre=-/bin/podman kill node-exporter
ExecStartPre=-/bin/podman rm node-exporter
# NOTE(review): :latest is unpinned, unlike the minio unit — confirm
# intentional.
ExecStartPre=/bin/podman pull quay.io/prometheus/node-exporter:latest
ExecStart=/bin/podman run \
--pid=host \
--net=host \
-v "/:/host:ro,rslave" \
--name="node-exporter" \
quay.io/prometheus/node-exporter:latest '--path.rootfs=/host'
ExecStop=/bin/podman stop node-exporter
[Install]
WantedBy=multi-user.target

View file

@ -1,22 +0,0 @@
# Run the QEMU guest agent in a privileged container so the hypervisor can
# query and control this VM over the virtio-serial channel.
[Unit]
Description=Run QEMU guest agent container
Wants = network-online.target
After = network-online.target
[Service]
TimeoutStartSec=0
# Best-effort cleanup of any stale container ("-" prefix ignores failures).
ExecStartPre=-/bin/podman kill qemu-ga
ExecStartPre=-/bin/podman rm qemu-ga
ExecStartPre=/bin/podman pull docker.io/linuxkit/qemu-ga:v0.7
# qemu-ga's transport method option is -m; the previous "-v m virtio-serial"
# passed "m" as a stray positional argument.
ExecStart=/bin/podman run --privileged \
--rm \
--pid=host \
--ipc=host \
--net=host \
-v /dev:/dev \
docker.io/linuxkit/qemu-ga:v0.7 /usr/bin/qemu-ga -v -m virtio-serial
ExecStop=/bin/podman stop qemu-ga
[Install]
WantedBy=multi-user.target

View file

@ -1,38 +0,0 @@
# Input variables for the matchbox/FCOS PXE stack.
variable "matchbox_http_endpoint" {
  type        = string
  description = "Matchbox HTTP read-only endpoint (e.g. http://matchbox.example.com:8080)"
}
variable "matchbox_rpc_endpoint" {
  type        = string
  description = "Matchbox gRPC API endpoint, without the protocol (e.g. matchbox.example.com:8081)"
}
variable "os_stream" {
  type        = string
  description = "Fedora CoreOS release stream (e.g. testing, stable)"
  default     = "stable"
}
variable "os_version" {
  type        = string
  description = "Fedora CoreOS version to PXE and install (e.g. 36.20220906.3.2)"
}
variable "ssh_authorized_key" {
  type        = string
  description = "SSH public key to set as an authorized_key on machines"
}
variable "k3s_agent_token" {
  type        = string
  description = "Shared token used to join nodes to the k3s cluster"
  default     = ""
  # The join token is a credential: keep it out of plan/apply output.
  sensitive   = true
}
variable "nucs" {
  type        = map(any)
  description = "NUC worker nodes: node name => MAC address"
}
variable "slowworkers" {
  type        = map(any)
  description = "Slow (VM) worker nodes: node name => MAC address"
}

View file

@ -1,32 +0,0 @@
# This file is maintained automatically by "tofu init".
# Manual edits may be lost in future updates.
provider "registry.opentofu.org/backblaze/b2" {
version = "0.8.6"
constraints = "0.8.6"
hashes = [
"h1:FUV3MlKORho03jB7xK4RHLqIoesXEpwDY3Q7j2niEtU=",
"zh:301cb0e9ad3f094e6cb182ffd1496234273d3e9138d03cbf234baf4edabaf0fb",
"zh:3b39c96c0b3081c5d9f372a355527835d26792ffaf6dc06fb390d2c76d09c394",
"zh:736a6d688bb261a3154970f7b487e142e02b02d1e4d877cce763539f4222cc8d",
"zh:ba26881679d2ce35b5f35f75309f5d480060fb29d655fd0e201dbbd55aabd345",
]
}
provider "registry.opentofu.org/hashicorp/kubernetes" {
version = "2.31.0"
constraints = "2.31.0"
hashes = [
"h1:MfkGdRph9sDol+ukIgIigdXuLLpC2JPUHH5oF2zEfTM=",
"zh:0dd25babf78a88a61dd329b8c18538a295ea63630f1b69575e7898c89307da39",
"zh:3138753e4b2ce6e9ffa5d65d73e9236169ff077c10089c7dc71031a0a139ff6d",
"zh:644f94692dc33de0bb1183c307ae373efbf4ef4cb92654ccc646a5716edf9593",
"zh:6cc630e43193220b1599e3227286cc4e3ca195910e8c56b6bacb50c5b5176dbf",
"zh:764173875e77aa482da4dca9fec5f77c455d028848edfc394aa7dac5dfed6afd",
"zh:7b1d380362d50ffbb3697483036ae351b0571e93b33754255cde6968e62b839f",
"zh:a1d93ca3d8d1ecdd3b69242d16ff21c91b34e2e98f02a3b2d02c908aeb45189b",
"zh:b471d0ab56dbf19c95fba68d2ef127bdb353be96a2be4c4a3dcd4d0db4b4180a",
"zh:d610f725ded4acd3d31a240472bb283aa5e657ed020395bdefea18d094b8c2bf",
"zh:d7f3ddd636ad5af6049922f212feb24830b7158410819c32073bf81c359cd2fa",
]
}

View file

@ -1,8 +0,0 @@
# OpenTofu workflow helpers. Recipe lines MUST be TAB-indented (make
# rejects space-indented recipes).
init:
	@tofu init
plan:
	@tofu plan -out tfplan
apply: plan
	@tofu apply tfplan

View file

@ -1,28 +0,0 @@
# Private B2 bucket holding Loki's k3s log chunks.
# NOTE(review): the Terraform name says "home-backup" but the bucket stores
# k3s logs; renaming the resource would force replacement, so flagging only.
resource "b2_bucket" "cowley-tech-home-backup" {
bucket_name = "cowley-tech-k3s-logs"
bucket_type = "allPrivate"
}
# Application key scoped to that bucket for Loki's object-storage access.
resource "b2_application_key" "loki" {
key_name = "cowley-tech-k3s-logs"
bucket_id = b2_bucket.cowley-tech-home-backup.id
capabilities = [
"deleteFiles",
"listBuckets",
"listFiles",
"readBuckets",
"readFiles",
"writeFiles",
]
}
# Hand the credentials to Loki via a Kubernetes secret in "logging".
resource "kubernetes_secret" "b2-loki" {
metadata {
name = "b2-loki-credentials"
namespace = "logging"
}
data = {
B2_APPLICATION_KEY_ID = b2_application_key.loki.application_key_id
B2_APPLICATION_KEY = b2_application_key.loki.application_key
}
}

View file

@ -1,23 +0,0 @@
# Terraform state stored in-cluster via the kubernetes backend, in a Secret
# with suffix "terraform-state" in the "default" namespace.
terraform {
backend "kubernetes" {
secret_suffix = "terraform-state"
namespace = "default"
}
required_version = ">= 1.0"
required_providers {
b2 = {
source = "Backblaze/b2"
version = "0.8.6"
}
kubernetes = {
source = "hashicorp/kubernetes"
version = "2.31.0"
}
}
}
# Empty provider blocks: credentials presumably come from the environment
# and the local kubeconfig — confirm.
provider "b2" {
}
provider "kubernetes" {
}

View file

@ -41,23 +41,6 @@ provider "registry.opentofu.org/hashicorp/kubernetes" {
] ]
} }
provider "registry.opentofu.org/hashicorp/local" {
version = "2.5.1"
hashes = [
"h1:8bCbJcRyrXb0YmskSdP0XtTLINolscfZ6oWaXgtXLHI=",
"zh:031c2c2070672b7e78e0aa15560839278dc57fe7cf1e58a617ac13c67b31d5fb",
"zh:1ef64ea4f8382cd538a76f3d319f405d18130dc3280f1c16d6aaa52a188ecaa4",
"zh:422ce45691b2f384dbd4596fdc8209d95cb43d85a82aaa0173089d38976d6e96",
"zh:7415fbd8da72d9363ba55dd8115837714f9534f5a9a518ec42268c2da1b9ed2f",
"zh:92aa22d071339c8ef595f18a9f9245c287266c80689f5746b26e10eaed04d542",
"zh:9cd0d99f5d3be835d6336c19c4057af6274e193e677ecf6370e5b0de12b4aafe",
"zh:a8c1525b389be5809a97f02aa7126e491ba518f97f57ed3095a3992f2134bb8f",
"zh:b336fa75f72643154b07c09b3968e417a41293358a54fe03efc0db715c5451e6",
"zh:c66529133599a419123ad2e42874afbd9aba82bd1de2b15cc68d2a1e665d4c8e",
"zh:c7568f75ba6cb7c3660b69eaab8b0e4278533bd9a7a4c33ee6590cc7e69743ea",
]
}
provider "registry.opentofu.org/hashicorp/random" { provider "registry.opentofu.org/hashicorp/random" {
version = "3.6.2" version = "3.6.2"
hashes = [ hashes = [

View file

@ -1,50 +0,0 @@
# 16 random bytes used as the OAuth2 client id for the Chat app.
resource "random_id" "chat_client_id" {
byte_length = 16
}
# Authentik OIDC provider for Chat; no client_secret is set here, so
# Authentik generates one.
resource "authentik_provider_oauth2" "chat" {
name = "Chat"
# Required. You can use the output of:
# $ openssl rand -hex 16
client_id = random_id.chat_client_id.id
# Optional: will be generated if not provided
# client_secret = "my_client_secret"
authorization_flow = data.authentik_flow.default-provider-authorization-implicit-consent.id
redirect_uris = [
"https://chat.lab.cowley.tech/oauth/oidc/callback"
]
property_mappings = [
data.authentik_scope_mapping.scope-openid.id,
data.authentik_scope_mapping.scope-email.id,
data.authentik_scope_mapping.scope-profile.id,
]
# Tolerate server-side changes to these fields without plan drift.
lifecycle {
ignore_changes = [
signing_key,
authentication_flow,
]
}
}
# Publish the provider as an Authentik application.
resource "authentik_application" "chat" {
name = "Chat"
slug = "chat"
protocol_provider = authentik_provider_oauth2.chat.id
}
# OIDC settings for Open WebUI, injected into the "ollama" namespace.
resource "kubernetes_secret" "chat" {
metadata {
name = "open-webui-authentik"
namespace = "ollama"
}
data = {
OAUTH_CLIENT_ID = authentik_provider_oauth2.chat.client_id
OAUTH_CLIENT_SECRET = authentik_provider_oauth2.chat.client_secret
OPENID_PROVIDER_URL = "https://auth.lab.cowley.tech/application/o/chat/.well-known/openid-configuration"
OAUTH_PROVIDER_NAME = "Authentik"
OAUTH_SCOPES = "openid email profile"
}
}

View file

@ -1 +0,0 @@
Zo7QLQh2eAe2XCUv6yOKZ0GRcW3k9zCFEqLUmHe0Mq3SyMED27YMGM1gKKe4xi2iqY4m4RPQ9eWI4NUygmWLISuaUnpa6GNZACrnnC4wcde1fEqzG4GwXawZ2HOQE51V

View file

@ -19,15 +19,15 @@ resource "random_id" "client_id" {
} }
resource "authentik_provider_oauth2" "grafana" { resource "authentik_provider_oauth2" "grafana" {
name = "Grafana" name = "Grafana"
# Required. You can use the output of: # Required. You can use the output of:
# $ openssl rand -hex 16 # $ openssl rand -hex 16
client_id = random_id.client_id.id client_id = random_id.client_id.id
# Optional: will be generated if not provided # Optional: will be generated if not provided
# client_secret = "my_client_secret" # client_secret = "my_client_secret"
authorization_flow = data.authentik_flow.default-provider-authorization-implicit-consent.id authorization_flow = data.authentik_flow.default-provider-authorization-implicit-consent.id
redirect_uris = [ redirect_uris = [
"https://grafana.lab.cowley.tech/login/generic_oauth" "https://grafana.lab.cowley.tech/login/generic_oauth"
@ -38,13 +38,6 @@ resource "authentik_provider_oauth2" "grafana" {
data.authentik_scope_mapping.scope-profile.id, data.authentik_scope_mapping.scope-profile.id,
data.authentik_scope_mapping.scope-openid.id, data.authentik_scope_mapping.scope-openid.id,
] ]
lifecycle {
ignore_changes = [
signing_key,
authentication_flow,
]
}
} }
resource "authentik_application" "grafana" { resource "authentik_application" "grafana" {
@ -54,33 +47,33 @@ resource "authentik_application" "grafana" {
} }
resource "authentik_group" "grafana_admins" { resource "authentik_group" "grafana_admins" {
name = "Grafana Admins" name = "Grafana Admins"
} }
resource "authentik_group" "grafana_editors" { resource "authentik_group" "grafana_editors" {
name = "Grafana Editors" name = "Grafana Editors"
} }
resource "authentik_group" "grafana_viewers" { resource "authentik_group" "grafana_viewers" {
name = "Grafana Viewers" name = "Grafana Viewers"
} }
resource "kubernetes_secret" "grafana-authentik" { resource "kubernetes_secret" "grafana-authentik" {
metadata { metadata {
name = "grafana-authentik" name = "grafana-authentik"
namespace = "monitoring" namespace = "monitoring"
} }
data = { data = {
"GF_AUTH_GENERIC_OAUTH_ENABLED" = "true" "GF_AUTH_GENERIC_OAUTH_ENABLED" = "true"
"GF_AUTH_GENERIC_OAUTH_CLIENT_ID" = authentik_provider_oauth2.grafana.client_id "GF_AUTH_GENERIC_OAUTH_CLIENT_ID" = authentik_provider_oauth2.grafana.client_id
"GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET" = authentik_provider_oauth2.grafana.client_secret "GF_AUTH_GENERIC_OAUTH_CLIENT_SECRET" = authentik_provider_oauth2.grafana.client_secret
"GF_AUTH_GENERIC_OAUTH_NAME" = "authentik" "GF_AUTH_GENERIC_OAUTH_NAME" = "authentik"
"GF_AUTH_GENERIC_OAUTH_SCOPES" = "openid profile email" "GF_AUTH_GENERIC_OAUTH_SCOPES" = "openid profile email"
"GF_AUTH_GENERIC_OAUTH_ALLOW_SIGN_UP" = "true" "GF_AUTH_GENERIC_OAUTH_ALLOW_SIGN_UP" = "true"
"GF_AUTH_GENERIC_OAUTH_AUTH_URL" = "https://auth.lab.cowley.tech/application/o/authorize/" "GF_AUTH_GENERIC_OAUTH_AUTH_URL" = "https://auth.lab.cowley.tech/application/o/authorize/"
"GF_AUTH_GENERIC_OAUTH_TOKEN_URL" = "https://auth.lab.cowley.tech/application/o/token/" "GF_AUTH_GENERIC_OAUTH_TOKEN_URL" = "https://auth.lab.cowley.tech/application/o/token/"
"GF_AUTH_GENERIC_OAUTH_API_URL" = "https://auth.lab.cowley.tech/application/o/userinfo/" "GF_AUTH_GENERIC_OAUTH_API_URL" = "https://auth.lab.cowley.tech/application/o/userinfo/"
"GF_AUTH_SIGNOUT_REDIRECT_URL" = "https://auth.lab.cowley.tech/application/o/grafana/end-session/" "GF_AUTH_SIGNOUT_REDIRECT_URL" = "https://auth.lab.cowley.tech/application/o/grafana/end-session/"
"GF_AUTH_GENERIC_SIGNOUT_REDIRECT_URL" = "https://auth.lab.cowley.tech/application/o/grafana/end-session/" "GF_AUTH_GENERIC_SIGNOUT_REDIRECT_URL" = "https://auth.lab.cowley.tech/application/o/grafana/end-session/"
# Optionally enable auto-login (bypasses Grafana login screen) # Optionally enable auto-login (bypasses Grafana login screen)
"GF_AUTH_OAUTH_AUTO_LOGIN" = "false" "GF_AUTH_OAUTH_AUTO_LOGIN" = "false"

View file

@ -1,59 +0,0 @@
#data "authentik_flow" "default-provider-authorization-implicit-consent" {
# slug = "default-provider-authorization-implicit-consent"
#}
#
#data "authentik_scope_mapping" "scope-email" {
# name = "authentik default OAuth Mapping: OpenID 'email'"
#}
#
#data "authentik_scope_mapping" "scope-profile" {
# name = "authentik default OAuth Mapping: OpenID 'profile'"
#}
#
#data "authentik_scope_mapping" "scope-openid" {
# name = "authentik default OAuth Mapping: OpenID 'openid'"
#}
#
# 16 random bytes used as the OAuth2 client_id for the Immich provider below.
resource "random_id" "immich_client_id" {
byte_length = 16
}
# OAuth2/OIDC provider in authentik for Immich (mobile app + web UI redirects).
resource "authentik_provider_oauth2" "immich" {
name = "Immich"
# Required. You can use the output of:
# $ openssl rand -hex 16
# NOTE(review): random_id's `.id` attribute is base64url-encoded, not hex as the
# comment above suggests; `.hex` would produce the hex form — confirm which is intended.
client_id = random_id.immich_client_id.id
# Optional: will be generated if not provided
# client_secret = "my_client_secret"
authorization_flow = data.authentik_flow.default-provider-authorization-implicit-consent.id
redirect_uris = [
"app.immich:/",
"https://photos.lab.cowley.tech/auth/login",
"https://photos.lab.cowley.tech/user-settings",
]
# Standard OpenID scopes exposed to Immich: email, profile, openid.
property_mappings = [
data.authentik_scope_mapping.scope-email.id,
data.authentik_scope_mapping.scope-profile.id,
data.authentik_scope_mapping.scope-openid.id,
]
# authentik assigns these server-side after creation; ignore them so every
# plan does not show spurious drift.
lifecycle {
ignore_changes = [
signing_key,
authentication_flow,
]
}
}
# authentik application entry that ties the Immich OAuth2 provider to the
# user-facing app tile at slug "immich".
resource "authentik_application" "immich" {
name = "Immich"
slug = "immich"
protocol_provider = authentik_provider_oauth2.immich.id
}
# Writes the generated Immich OAuth client secret to disk so it can be copied
# into the Immich configuration.
# NOTE(review): this persists a credential in plaintext beside the module (and
# it is already readable in Terraform state); consider local_sensitive_file or
# deleting this resource once the secret has been consumed.
resource "local_file" "foo" {
content = authentik_provider_oauth2.immich.client_secret
filename = "${path.module}/foo.bar"
# Restrict the secret file to its owner; the provider default is 0777.
file_permission = "0600"
}

View file

@ -1,71 +0,0 @@
#data "authentik_scope_mapping" "nextcloud" {
# name = "Nextcloud Profile"
#}
# Custom "profile" scope mapping that injects the user's authentik groups,
# Nextcloud quota and a stable user id into the OIDC claims Nextcloud reads.
resource "authentik_scope_mapping" "nextcloud-scope" {
name = "Nextcloud Profile"
scope_name = "profile"
expression = <<EOF
# Extract all groups the user is a member of
groups = [group.name for group in user.ak_groups.all()]
# Nextcloud admins must be members of a group called "admin".
# This is static and cannot be changed.
# We append a fictional "admin" group to the user's groups if they are an admin in authentik.
# This group would only be visible in Nextcloud and does not exist in authentik.
# Guard on the literal "admin" entry (the name Nextcloud actually checks) so it
# is never appended twice; the previous guard tested "Nextcloud Admin", which
# matches neither that entry nor the "Nextcloud Admins" authentik group.
if user.is_superuser and "admin" not in groups:
    groups.append("admin")
return {
    "name": request.user.name,
    "groups": groups,
    # To set a quota set the "nextcloud_quota" property in the user's attributes
    "quota": user.group_attributes().get("nextcloud_quota", None),
    # To connect an already existing user, set the "nextcloud_user_id" property in the
    # user's attributes to the username of the corresponding user on Nextcloud.
    "user_id": user.attributes.get("nextcloud_user_id", str(user.uuid)),
}
EOF
}
# 16 random bytes used as the OAuth2 client_id for the Nextcloud provider below.
resource "random_id" "nextcloud_client_id" {
byte_length = 16
}
# OAuth2/OIDC provider in authentik for Nextcloud (user_oidc app callback).
resource "authentik_provider_oauth2" "nextcloud" {
name = "Nextcloud"
# Required. You can use the output of:
# $ openssl rand -hex 16
# NOTE(review): random_id's `.id` is base64url-encoded, not hex as the comment
# above suggests; `.hex` would produce the hex form — confirm which is intended.
client_id = random_id.nextcloud_client_id.id
# Optional: will be generated if not provided
# client_secret = "my_client_secret"
# Use the immutable user UUID as the OIDC subject instead of the username.
sub_mode = "user_uuid"
authorization_flow = data.authentik_flow.default-provider-authorization-implicit-consent.id
redirect_uris = [
"https://cloud.lab.cowley.tech/apps/user_oidc/code",
]
# Email scope plus the custom Nextcloud profile mapping defined above
# (groups / quota / user_id claims).
property_mappings = [
data.authentik_scope_mapping.scope-email.id,
authentik_scope_mapping.nextcloud-scope.id
]
# authentik assigns these server-side after creation; ignore them so every
# plan does not show spurious drift.
lifecycle {
ignore_changes = [
signing_key,
authentication_flow,
]
}
}
# authentik application entry that ties the Nextcloud OAuth2 provider to the
# user-facing app tile at slug "nextcloud".
resource "authentik_application" "nextcloud" {
name = "Nextcloud"
slug = "nextcloud"
protocol_provider = authentik_provider_oauth2.nextcloud.id
}
# authentik group for Nextcloud administrators; referenced from user resources
# (e.g. authentik_user.chris) to grant membership.
resource "authentik_group" "nextcloud_admins" {
name = "Nextcloud Admins"
}

View file

@ -3,28 +3,22 @@ resource "random_id" "paperless_client_id" {
} }
resource "authentik_provider_oauth2" "paperless" { resource "authentik_provider_oauth2" "paperless" {
name = "Paperless" name = "Paperless"
# Required. You can use the output of: # Required. You can use the output of:
# $ openssl rand -hex 16 # $ openssl rand -hex 16
client_id = random_id.paperless_client_id.id client_id = random_id.paperless_client_id.id
authorization_flow = data.authentik_flow.default-provider-authorization-implicit-consent.id authorization_flow = data.authentik_flow.default-provider-authorization-implicit-consent.id
redirect_uris = [ redirect_uris = [
"https://paperless.lab.cowley.tech/accounts/oidc/authentik/login/callback/" "https://paperless.lab.cowley.tech/accounts/oidc/authentik/login/callback/"
] ]
property_mappings = [ # property_mappings = [
data.authentik_scope_mapping.scope-email.id, # data.authentik_scope_mapping.scope-email.id,
data.authentik_scope_mapping.scope-profile.id, # data.authentik_scope_mapping.scope-profile.id,
data.authentik_scope_mapping.scope-openid.id, # data.authentik_scope_mapping.scope-openid.id,
] # ]
lifecycle {
ignore_changes = [
signing_key,
authentication_flow,
]
}
} }
resource "authentik_application" "paperless" { resource "authentik_application" "paperless" {
@ -32,37 +26,3 @@ resource "authentik_application" "paperless" {
slug = "paperless" slug = "paperless"
protocol_provider = authentik_provider_oauth2.paperless.id protocol_provider = authentik_provider_oauth2.paperless.id
} }
#data "template_file" "paperless-config" {
# template = "${file("${path.module}/paperless.tpl")}"
# vars = {
# client_id = authentik_provider_oauth2.paperless.client_id
# }
#}
# Kubernetes namespace for the Paperless-ngx deployment.
resource "kubernetes_namespace" "paperless" {
metadata {
name = "paperless-ngx"
}
# Labels drift after creation — presumably added by another controller
# (Helm, policy agent, …); ignore them so plans stay clean. TODO confirm source.
lifecycle {
ignore_changes = [
metadata[0].labels
]
}
}
# Environment secret consumed by Paperless-ngx: enables the allauth OIDC app
# and injects the authentik client credentials rendered from paperless.tpl.
resource "kubernetes_secret" "paperless-env" {
metadata {
name = "paperless-env"
namespace = kubernetes_namespace.paperless.metadata[0].name
}
data = {
"PAPERLESS_APPS" = "allauth.socialaccount.providers.openid_connect"
# Renders the JSON provider config template with the client id/secret
# generated by the authentik_provider_oauth2.paperless resource.
"PAPERLESS_SOCIALACCOUNT_PROVIDERS" = templatefile(
"${path.module}/paperless.tpl",
{
client_id = authentik_provider_oauth2.paperless.client_id,
client_secret = authentik_provider_oauth2.paperless.client_secret
}
)
}
}

View file

@ -1,16 +0,0 @@
{
"openid_connect": {
"APPS": [
{
"provider_id": "authentik",
"name": "Authentik",
"client_id": "${client_id}",
"secret": "${client_secret}",
"settings": {
"server_url": "https://auth.lab.cowley.tech/application/o/paperless/.well-known/openid-configuration"
}
}
],
"OAUTH_PKCE_ENABLED": "True"
}
}

View file

@ -1,15 +1,11 @@
terraform { terraform {
backend "kubernetes" {
secret_suffix = "authentik-state"
namespace = "authentik"
}
required_providers { required_providers {
kubernetes = { kubernetes = {
source = "hashicorp/kubernetes" source = "hashicorp/kubernetes"
version = "2.31.0" version = "2.31.0"
} }
authentik = { authentik = {
source = "goauthentik/authentik" source = "goauthentik/authentik"
version = "2024.6.0" version = "2024.6.0"
} }
} }

View file

@ -1,6 +1,6 @@
resource "authentik_user" "chriscowley" { resource "authentik_user" "chriscowley" {
username = "chriscowley" username = "chriscowley"
name = "Chris Cowley" name = "Chris Cowley"
email = "chriscowleysound@gmail.com" email = "chriscowleysound@gmail.com"
@ -8,28 +8,4 @@ resource "authentik_user" "chriscowley" {
data.authentik_group.admins.id, data.authentik_group.admins.id,
authentik_group.grafana_admins.id, authentik_group.grafana_admins.id,
] ]
is_active = false
}
# Primary admin account: member of the built-in admins group plus the
# Grafana and Nextcloud admin groups managed in this module.
resource "authentik_user" "chris" {
username = "chris"
name = "Chris Cowley"
email = "chris@cowley.tech"
groups = [
data.authentik_group.admins.id,
authentik_group.grafana_admins.id,
authentik_group.nextcloud_admins.id,
]
# attributes = jsonencode(
#   {
#     nextcloud_user_id = "chris"
#   }
# )
}
# Standard user account; no group memberships assigned here.
resource "authentik_user" "nadege" {
username = "nadege"
name = "Nadege Cowley"
email = "nadege@cowley.tech"
} }