Merge pull request 'stage' (#8) from stage into master
Some checks failed: Gitea Actions Demo / Terraform Apply (push), failing after 5m24s

Reviewed-on: #8
micqdf 2025-04-17 16:12:34 +00:00
commit 114bfb9772
8 changed files with 240 additions and 34 deletions


@@ -1,5 +1,5 @@
name: Gitea Actions Demo
-run-name: ${{ gitea.actor }} is testing out Gitea Actions 🚀
+run-name: ${{ gitea.actor }} is deploying with Terraform 🚀
on:
  push:
@@ -15,6 +15,10 @@ jobs:
      contents: read
      pull-requests: write
+    env:
+      TF_VAR_TS_AUTHKEY: ${{ secrets.TAILSCALE_KEY }}
+      TF_VAR_ssh_key: ${{ secrets.SSH_PUBLIC_KEY }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
@@ -33,12 +37,11 @@ jobs:
        working-directory: terraform
        run: terraform init
      - name: Terraform Plan
        working-directory: terraform
        run: terraform plan
      - name: Terraform Apply
        working-directory: terraform
        run: terraform apply -auto-approve


@@ -0,0 +1,41 @@
name: Gitea Destroy Terraform
run-name: ${{ gitea.actor }} triggered a Terraform Destroy 🧨
on:
  workflow_dispatch: # Manual trigger
jobs:
  destroy:
    name: "Terraform Destroy"
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    env:
      TF_VAR_TS_AUTHKEY: ${{ secrets.TAILSCALE_KEY }}
      TF_VAR_ssh_key: ${{ secrets.SSH_PUBLIC_KEY }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      - name: Set up Terraform
        uses: hashicorp/setup-terraform@v2
        with:
          terraform_version: 1.6.6
      - name: Inject sensitive secrets
        working-directory: terraform
        run: |
          echo 'proxmox_password = "${{ secrets.PROXMOX_PASSWORD }}"' >> terraform.tfvars
      - name: Terraform Init
        working-directory: terraform
        run: terraform init
      - name: Terraform Destroy
        working-directory: terraform
        run: terraform destroy -auto-approve
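
For reference, Terraform maps any TF_VAR_<name> environment variable onto the input variable of the same name, so the env block above feeds TS_AUTHKEY and ssh_key directly, while the injected terraform.tfvars line supplies the Proxmox password. A minimal sketch of the declarations this relies on (TS_AUTHKEY and ssh_key appear in variables.tf later in this diff; the proxmox_password declaration shown here is an assumption):

# Sketch only: input variables matching the workflow's TF_VAR_ env vars and tfvars injection.
variable "TS_AUTHKEY" { # populated from TF_VAR_TS_AUTHKEY
  type = string
}

variable "ssh_key" { # populated from TF_VAR_ssh_key
  type = string
}

variable "proxmox_password" { # assumed declaration; populated from the generated terraform.tfvars
  type      = string
  sensitive = true
}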


@@ -4,8 +4,8 @@ run-name: ${{ gitea.actor }} is testing out Gitea Actions 🚀
on:
  push:
    branches:
      - stage
      - test
jobs:
  terraform:
@@ -16,6 +16,12 @@ jobs:
      contents: read
      pull-requests: write
+    env:
+      TF_VAR_TAILSCALE_KEY: ${{ secrets.TAILSCALE_KEY }}
+      TF_VAR_TS_AUTHKEY: ${{ secrets.TAILSCALE_KEY }}
+      TF_VAR_ssh_key: ${{ secrets.SSH_PUBLIC_KEY }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4


@@ -1,6 +1,61 @@
# This file is maintained automatically by "terraform init".
# Manual edits may be lost in future updates.
provider "registry.terraform.io/hashicorp/local" {
version = "2.5.2"
hashes = [
"h1:JlMZD6nYqJ8sSrFfEAH0Vk/SL8WLZRmFaMUF9PJK5wM=",
"zh:136299545178ce281c56f36965bf91c35407c11897f7082b3b983d86cb79b511",
"zh:3b4486858aa9cb8163378722b642c57c529b6c64bfbfc9461d940a84cd66ebea",
"zh:4855ee628ead847741aa4f4fc9bed50cfdbf197f2912775dd9fe7bc43fa077c0",
"zh:4b8cd2583d1edcac4011caafe8afb7a95e8110a607a1d5fb87d921178074a69b",
"zh:52084ddaff8c8cd3f9e7bcb7ce4dc1eab00602912c96da43c29b4762dc376038",
"zh:71562d330d3f92d79b2952ffdda0dad167e952e46200c767dd30c6af8d7c0ed3",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:805f81ade06ff68fa8b908d31892eaed5c180ae031c77ad35f82cb7a74b97cf4",
"zh:8b6b3ebeaaa8e38dd04e56996abe80db9be6f4c1df75ac3cccc77642899bd464",
"zh:ad07750576b99248037b897de71113cc19b1a8d0bc235eb99173cc83d0de3b1b",
"zh:b9f1c3bfadb74068f5c205292badb0661e17ac05eb23bfe8bd809691e4583d0e",
"zh:cc4cbcd67414fefb111c1bf7ab0bc4beb8c0b553d01719ad17de9a047adff4d1",
]
}
provider "registry.terraform.io/hashicorp/null" {
version = "3.2.3"
hashes = [
"h1:+AnORRgFbRO6qqcfaQyeX80W0eX3VmjadjnUFUJTiXo=",
"zh:22d062e5278d872fe7aed834f5577ba0a5afe34a3bdac2b81f828d8d3e6706d2",
"zh:23dead00493ad863729495dc212fd6c29b8293e707b055ce5ba21ee453ce552d",
"zh:28299accf21763ca1ca144d8f660688d7c2ad0b105b7202554ca60b02a3856d3",
"zh:55c9e8a9ac25a7652df8c51a8a9a422bd67d784061b1de2dc9fe6c3cb4e77f2f",
"zh:756586535d11698a216291c06b9ed8a5cc6a4ec43eee1ee09ecd5c6a9e297ac1",
"zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3",
"zh:9d5eea62fdb587eeb96a8c4d782459f4e6b73baeece4d04b4a40e44faaee9301",
"zh:a6355f596a3fb8fc85c2fb054ab14e722991533f87f928e7169a486462c74670",
"zh:b5a65a789cff4ada58a5baffc76cb9767dc26ec6b45c00d2ec8b1b027f6db4ed",
"zh:db5ab669cf11d0e9f81dc380a6fdfcac437aea3d69109c7aef1a5426639d2d65",
"zh:de655d251c470197bcbb5ac45d289595295acb8f829f6c781d4a75c8c8b7c7dd",
"zh:f5c68199f2e6076bce92a12230434782bf768103a427e9bb9abee99b116af7b5",
]
}
provider "registry.terraform.io/hashicorp/template" {
version = "2.2.0"
hashes = [
"h1:94qn780bi1qjrbC3uQtjJh3Wkfwd5+tTtJHOb7KTg9w=",
"zh:01702196f0a0492ec07917db7aaa595843d8f171dc195f4c988d2ffca2a06386",
"zh:09aae3da826ba3d7df69efeb25d146a1de0d03e951d35019a0f80e4f58c89b53",
"zh:09ba83c0625b6fe0a954da6fbd0c355ac0b7f07f86c91a2a97849140fea49603",
"zh:0e3a6c8e16f17f19010accd0844187d524580d9fdb0731f675ffcf4afba03d16",
"zh:45f2c594b6f2f34ea663704cc72048b212fe7d16fb4cfd959365fa997228a776",
"zh:77ea3e5a0446784d77114b5e851c970a3dde1e08fa6de38210b8385d7605d451",
"zh:8a154388f3708e3df5a69122a23bdfaf760a523788a5081976b3d5616f7d30ae",
"zh:992843002f2db5a11e626b3fc23dc0c87ad3729b3b3cff08e32ffb3df97edbde",
"zh:ad906f4cebd3ec5e43d5cd6dc8f4c5c9cc3b33d2243c89c5fc18f97f7277b51d",
"zh:c979425ddb256511137ecd093e23283234da0154b7fa8b21c2687182d9aea8b2",
]
}
provider "registry.terraform.io/telmate/proxmox" { provider "registry.terraform.io/telmate/proxmox" {
version = "3.0.1-rc8" version = "3.0.1-rc8"
constraints = "3.0.1-rc8" constraints = "3.0.1-rc8"

terraform/cloud-init.tf (new file, 70 added lines)

@@ -0,0 +1,70 @@
### Alpaca cloud-init template
data "template_file" "cloud_init_alpaca" {
  count    = var.alpaca_vm_count
  template = file("${path.module}/files/cloud_init.yaml")
  vars = {
    ssh_key    = var.ssh_key
    hostname   = "alpaca-${count.index + 1}"
    domain     = "home.arpa"
    TS_AUTHKEY = var.TS_AUTHKEY
  }
}

resource "local_file" "cloud_init_alpaca" {
  count    = var.alpaca_vm_count
  content  = data.template_file.cloud_init_alpaca[count.index].rendered
  filename = "${path.module}/files/cloud_init_alpaca_${count.index + 1}.yaml"
}

resource "null_resource" "upload_cloud_init_alpaca" {
  count = var.alpaca_vm_count

  connection {
    type = "ssh"
    user = "root"
    host = var.target_node
  }

  provisioner "file" {
    source      = local_file.cloud_init_alpaca[count.index].filename
    destination = "/var/lib/vz/snippets/cloud_init_alpaca_${count.index + 1}.yaml"
  }
}

### Llama cloud-init template
data "template_file" "cloud_init_llama" {
  count    = var.llama_vm_count
  template = file("${path.module}/files/cloud_init.yaml")
  vars = {
    ssh_key    = var.ssh_key
    hostname   = "llama-${count.index + 1}"
    domain     = "home.arpa"
    TS_AUTHKEY = var.TS_AUTHKEY
  }
}

resource "local_file" "cloud_init_llama" {
  count    = var.llama_vm_count
  content  = data.template_file.cloud_init_llama[count.index].rendered
  filename = "${path.module}/files/cloud_init_llama_${count.index + 1}.yaml"
}

resource "null_resource" "upload_cloud_init_llama" {
  count = var.llama_vm_count

  connection {
    type = "ssh"
    user = "root"
    host = var.target_node
  }

  provisioner "file" {
    source      = local_file.cloud_init_llama[count.index].filename
    destination = "/var/lib/vz/snippets/cloud_init_llama_${count.index + 1}.yaml"
  }
}
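
Side note: the template_file data sources above rely on the hashicorp/template provider pinned at 2.2.0 in the lock file, which is archived. A minimal sketch of the same rendering using Terraform's built-in templatefile() function instead (an alternative, not what this commit does), shown for the alpaca case:

# Alternative sketch: render the snippet with the built-in templatefile() function
# rather than the archived hashicorp/template provider.
resource "local_file" "cloud_init_alpaca" {
  count    = var.alpaca_vm_count
  filename = "${path.module}/files/cloud_init_alpaca_${count.index + 1}.yaml"
  content = templatefile("${path.module}/files/cloud_init.yaml", {
    ssh_key    = var.ssh_key
    hostname   = "alpaca-${count.index + 1}"
    domain     = "home.arpa"
    TS_AUTHKEY = var.TS_AUTHKEY
  })
}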


@@ -0,0 +1,10 @@
#cloud-config
hostname: ${hostname}
fqdn: ${hostname}.${domain}
ssh_authorized_keys:
  - ${ssh_key}
runcmd:
  - curl -fsSL https://tailscale.com/install.sh | sh
  - tailscale up --auth-key=${TS_AUTHKEY}


@@ -15,24 +15,28 @@ provider "proxmox" {
}

resource "proxmox_vm_qemu" "alpacas" {
-  count = var.alpaca_count
+  count = var.alpaca_vm_count
  vmid = 500 + count.index + 1
  name = "alpaca-${count.index + 1}"
  vmid = 500 + count.index + 1
  target_node = var.target_node
  clone = var.clone_template
  full_clone = false
  sockets = var.sockets
  cores = var.cores
  memory = var.memory
  scsihw = "virtio-scsi-pci"
  agent = 1
  boot = "order=scsi0"
  sockets = var.sockets
  cores = var.cores
  memory = var.memory
  scsihw = "virtio-scsi-pci"
  boot = "order=scsi0"
+  ipconfig0 = "ip=dhcp"
+  cicustom = "user=local:snippets/cloud_init_alpaca_${count.index + 1}.yaml"
+  depends_on = [null_resource.upload_cloud_init_alpaca]

  disk {
    slot = "scsi0"
    type = "disk"
    size = var.disk_size
    storage = var.storage
    size = var.disk_size
  }

  network {
@@ -40,33 +44,32 @@ resource "proxmox_vm_qemu" "alpacas" {
    model = "virtio"
    bridge = var.bridge
  }
-  ipconfig0 = "ip=dhcp"
-  ciuser = "alpine"
-  cipassword = var.proxmox_password
-  cicustom = "user=local:snippets/cloudinit-tailscale.yaml"
}

resource "proxmox_vm_qemu" "llamas" {
-  count = var.llama_count
+  count = var.llama_vm_count
  vmid = 600 + count.index + 1
  name = "llama-${count.index + 1}"
  vmid = 600 + count.index + 1
  target_node = var.target_node
  clone = var.clone_template
  full_clone = false
  sockets = var.sockets
  cores = var.cores
  memory = var.memory
  scsihw = "virtio-scsi-pci"
  agent = 1
  boot = "order=scsi0"
  sockets = var.sockets
  cores = var.cores
  memory = var.memory
  scsihw = "virtio-scsi-pci"
  boot = "order=scsi0"
+  ipconfig0 = "ip=dhcp"
+  cicustom = "user=local:snippets/cloud_init_llama_${count.index + 1}.yaml"
+  depends_on = [null_resource.upload_cloud_init_llama]

  disk {
    slot = "scsi0"
    type = "disk"
    size = var.disk_size
    storage = var.storage
    size = var.disk_size
  }

  network {
@@ -74,10 +77,5 @@ resource "proxmox_vm_qemu" "llamas" {
    model = "virtio"
    bridge = var.bridge
  }
-  ipconfig0 = "ip=dhcp"
-  ciuser = "alpine"
-  cipassword = var.proxmox_password
-  cicustom = "user=local:snippets/cloudinit-tailscale.yaml"
}


@@ -62,3 +62,26 @@ variable "llama_count" {
  description = "How many Llama VMs to create"
}

variable "alpaca_vm_count" {
  type        = number
  default     = 1
  description = "How many Alpaca VMs to create"
}

variable "llama_vm_count" {
  type        = number
  default     = 1
  description = "How many Llama VMs to create"
}

variable "TS_AUTHKEY" {
  type        = string
  description = "Tailscale auth key used in cloud-init"
}

variable "ssh_key" {
  type        = string
  description = "Public SSH key used by cloud-init"
}
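
Both new counts default to 1, so a single alpaca and a single llama VM are created unless overridden. A hypothetical terraform.tfvars (values are examples only) could scale the fleet, while TS_AUTHKEY and ssh_key keep arriving through the TF_VAR_ environment variables set in the workflows:

# Hypothetical terraform.tfvars overriding the new count defaults.
alpaca_vm_count = 2
llama_vm_count  = 3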